1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
27
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
31
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register.
36
37 Call `put_var_into_stack' when you learn, belatedly, that a variable
38 previously given a pseudo-register must in fact go in the stack.
39 This function changes the DECL_RTL to be a stack slot instead of a reg,
40 then scans all the RTL instructions generated so far to correct them. */
41
42 #include "config.h"
43 #include "system.h"
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "except.h"
48 #include "function.h"
49 #include "insn-flags.h"
50 #include "expr.h"
51 #include "insn-codes.h"
52 #include "regs.h"
53 #include "hard-reg-set.h"
54 #include "insn-config.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "basic-block.h"
58 #include "obstack.h"
59 #include "toplev.h"
60 #include "hash.h"
61 #include "ggc.h"
62 #include "tm_p.h"
63
64 #ifndef TRAMPOLINE_ALIGNMENT
65 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66 #endif
67
68 #ifndef LOCAL_ALIGNMENT
69 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70 #endif
71
72 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
73 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
74 #endif
75
76 /* Some systems use __main in a way incompatible with its use in gcc; in these
77 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
78 give the same symbol without quotes for an alternative entry point. You
79 must define both, or neither. */
80 #ifndef NAME__MAIN
81 #define NAME__MAIN "__main"
82 #define SYMBOL__MAIN __main
83 #endif
84
85 /* Round a value down to the largest multiple of the required alignment
86 that does not exceed it. Avoid using division in case the value is
87 negative. Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
89
90 /* Similar, but round up to the next multiple of the required
91 alignment. */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
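/* A worked example of the two macros above (illustrative values only,
   assuming ALIGN is a power of two as required):

     FLOOR_ROUND (37, 16) == (37 & ~15)        == 32
     CEIL_ROUND  (37, 16) == ((37 + 15) & ~15) == 48

   Because the masking form avoids division, it also behaves correctly when
   VALUE is negative on two's-complement targets.  */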
93
94 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
95 during rtl generation. If they are different register numbers, this is
96 always true. It may also be true if
97 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
98 generation. See fix_lexical_addr for details. */
99
100 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
101 #define NEED_SEPARATE_AP
102 #endif
103
104 /* Nonzero if function being compiled doesn't contain any calls
105 (ignoring the prologue and epilogue). This is set prior to
106 local register allocation and is valid for the remaining
107 compiler passes. */
108 int current_function_is_leaf;
109
110 /* Nonzero if function being compiled doesn't modify the stack pointer
111 (ignoring the prologue and epilogue). This is only valid after
112 life_analysis has run. */
113 int current_function_sp_is_unchanging;
114
115 /* Nonzero if the function being compiled is a leaf function which only
116 uses leaf registers. This is valid after reload (specifically after
117 sched2) and is useful only if the port defines LEAF_REGISTERS. */
118 int current_function_uses_only_leaf_regs;
119
120 /* Nonzero once virtual register instantiation has been done.
121 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
122 static int virtuals_instantiated;
123
124 /* These variables hold pointers to functions to
125 save and restore machine-specific data,
126 in push_function_context and pop_function_context. */
127 void (*init_machine_status) PARAMS ((struct function *));
128 void (*save_machine_status) PARAMS ((struct function *));
129 void (*restore_machine_status) PARAMS ((struct function *));
130 void (*mark_machine_status) PARAMS ((struct function *));
131 void (*free_machine_status) PARAMS ((struct function *));
132
133 /* Likewise, but for language-specific data. */
134 void (*init_lang_status) PARAMS ((struct function *));
135 void (*save_lang_status) PARAMS ((struct function *));
136 void (*restore_lang_status) PARAMS ((struct function *));
137 void (*mark_lang_status) PARAMS ((struct function *));
138 void (*free_lang_status) PARAMS ((struct function *));
139
140 /* The FUNCTION_DECL for an inline function currently being expanded. */
141 tree inline_function_decl;
142
143 /* The currently compiled function. */
144 struct function *cfun = 0;
145
146 /* Global list of all compiled functions. */
147 struct function *all_functions = 0;
148
149 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
150 static int *prologue;
151 static int *epilogue;
152 \f
153 /* In order to evaluate some expressions, such as function calls returning
154 structures in memory, we need to temporarily allocate stack locations.
155 We record each allocated temporary in the following structure.
156
157 Associated with each temporary slot is a nesting level. When we pop up
158 one level, all temporaries associated with the previous level are freed.
159 Normally, all temporaries are freed after the execution of the statement
160 in which they were created. However, if we are inside a ({...}) grouping,
161 the result may be in a temporary and hence must be preserved. If the
162 result could be in a temporary, we preserve it if we can determine which
163 one it is in. If we cannot determine which temporary may contain the
164 result, all temporaries are preserved. A temporary is preserved by
165 pretending it was allocated at the previous nesting level.
166
167 Automatic variables are also assigned temporary slots, at the nesting
168 level where they are defined. They are marked as "kept" so that
169 free_temp_slots will not free them. */
170
171 struct temp_slot
172 {
173 /* Points to next temporary slot. */
174 struct temp_slot *next;
175 /* The rtx used to reference the slot. */
176 rtx slot;
177 /* The rtx used to represent the address if it is not simply the address
178 of the slot above. May be an EXPR_LIST if multiple addresses exist. */
179 rtx address;
180 /* The alignment (in bits) of the slot. */
181 int align;
182 /* The size, in units, of the slot. */
183 HOST_WIDE_INT size;
184 /* The alias set for the slot. If the alias set is zero, we don't
185 know anything about the alias set of the slot. We must only
186 reuse a slot if it is assigned an object of the same alias set.
187 Otherwise, the rest of the compiler may assume that the new use
188 of the slot cannot alias the old use of the slot, which is
189 false. If the slot has alias set zero, then we can't reuse the
190 slot at all, since we have no idea what alias set may have been
191 imposed on the memory. For example, if the stack slot is the
192 call frame for an inlined function, we have no idea what alias
193 sets will be assigned to various pieces of the call frame. */
194 int alias_set;
195 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
196 tree rtl_expr;
197 /* Non-zero if this temporary is currently in use. */
198 char in_use;
199 /* Non-zero if this temporary has its address taken. */
200 char addr_taken;
201 /* Nesting level at which this slot is being used. */
202 int level;
203 /* Non-zero if this should survive a call to free_temp_slots. */
204 int keep;
205 /* The offset of the slot from the frame_pointer, including extra space
206 for alignment. This info is for combine_temp_slots. */
207 HOST_WIDE_INT base_offset;
208 /* The size of the slot, including extra space for alignment. This
209 info is for combine_temp_slots. */
210 HOST_WIDE_INT full_size;
211 };
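/* A sketch of how these slots are typically driven by the routines defined
   below (a hypothetical caller, shown only to illustrate the nesting-level
   scheme described above):

     push_temp_slots ();                        -- enter a new nesting level
     t = assign_stack_temp (DImode, 8, 0);      -- grab or reuse a free slot
     ... emit RTL that uses T ...
     preserve_temp_slots (result);              -- keep a statement's result
     free_temp_slots ();                        -- end of the statement
     pop_temp_slots ();                         -- leave the nesting level

   Slots released by free_temp_slots are not deallocated; they stay on the
   temp_slots chain and may be handed out again by assign_stack_temp.  */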
212 \f
213 /* This structure is used to record MEMs or pseudos used to replace VAR, any
214 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
215 maintain this list in case two operands of an insn were required to match;
216 in that case we must ensure we use the same replacement. */
217
218 struct fixup_replacement
219 {
220 rtx old;
221 rtx new;
222 struct fixup_replacement *next;
223 };
224
225 struct insns_for_mem_entry {
226 /* The KEY in HE will be a MEM. */
227 struct hash_entry he;
228 /* These are the INSNS which reference the MEM. */
229 rtx insns;
230 };
231
232 /* Forward declarations. */
233
234 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
235 int, struct function *));
236 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
237 HOST_WIDE_INT, int, tree));
238 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
239 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
240 enum machine_mode, enum machine_mode,
241 int, int, int, struct hash_table *));
242 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
243 struct hash_table *));
244 static struct fixup_replacement
245 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
246 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
247 rtx, int, struct hash_table *));
248 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
249 struct fixup_replacement **));
250 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
251 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
252 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
253 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
254 static void instantiate_decls PARAMS ((tree, int));
255 static void instantiate_decls_1 PARAMS ((tree, int));
256 static void instantiate_decl PARAMS ((rtx, int, int));
257 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
258 static void delete_handlers PARAMS ((void));
259 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
260 struct args_size *));
261 #ifndef ARGS_GROW_DOWNWARD
262 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
263 tree));
264 #endif
265 #ifdef ARGS_GROW_DOWNWARD
266 static tree round_down PARAMS ((tree, int));
267 #endif
268 static rtx round_trampoline_addr PARAMS ((rtx));
269 static tree blocks_nreverse PARAMS ((tree));
270 static int all_blocks PARAMS ((tree, tree *));
271 /* We always define `record_insns' even if it is not used so that we
272 can always export `prologue_epilogue_contains'. */
273 static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
274 static int contains PARAMS ((rtx, int *));
275 #ifdef HAVE_return
276 static void emit_return_into_block PARAMS ((basic_block));
277 #endif
278 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
279 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
280 struct hash_table *));
281 static int is_addressof PARAMS ((rtx *, void *));
282 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
283 struct hash_table *,
284 hash_table_key));
285 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
286 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
287 static int insns_for_mem_walk PARAMS ((rtx *, void *));
288 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
289 static void mark_temp_slot PARAMS ((struct temp_slot *));
290 static void mark_function_status PARAMS ((struct function *));
291 static void mark_function_chain PARAMS ((void *));
292 static void prepare_function_start PARAMS ((void));
293 static void do_clobber_return_reg PARAMS ((rtx, void *));
294 static void do_use_return_reg PARAMS ((rtx, void *));
295 \f
296 /* Pointer to chain of `struct function' for containing functions. */
297 struct function *outer_function_chain;
298
299 /* Given a function decl for a containing function,
300 return the `struct function' for it. */
301
302 struct function *
303 find_function_data (decl)
304 tree decl;
305 {
306 struct function *p;
307
308 for (p = outer_function_chain; p; p = p->next)
309 if (p->decl == decl)
310 return p;
311
312 abort ();
313 }
314
315 /* Save the current context for compilation of a nested function.
316 This is called from language-specific code. The caller should use
317 the save_lang_status callback to save any language-specific state,
318 since this function knows only about language-independent
319 variables. */
320
321 void
322 push_function_context_to (context)
323 tree context;
324 {
325 struct function *p, *context_data;
326
327 if (context)
328 {
329 context_data = (context == current_function_decl
330 ? cfun
331 : find_function_data (context));
332 context_data->contains_functions = 1;
333 }
334
335 if (cfun == 0)
336 init_dummy_function_start ();
337 p = cfun;
338
339 p->next = outer_function_chain;
340 outer_function_chain = p;
341 p->fixup_var_refs_queue = 0;
342
343 save_tree_status (p);
344 if (save_lang_status)
345 (*save_lang_status) (p);
346 if (save_machine_status)
347 (*save_machine_status) (p);
348
349 cfun = 0;
350 }
351
352 void
353 push_function_context ()
354 {
355 push_function_context_to (current_function_decl);
356 }
357
358 /* Restore the last saved context, at the end of a nested function.
359 This function is called from language-specific code. */
360
361 void
362 pop_function_context_from (context)
363 tree context ATTRIBUTE_UNUSED;
364 {
365 struct function *p = outer_function_chain;
366 struct var_refs_queue *queue;
367 struct var_refs_queue *next;
368
369 cfun = p;
370 outer_function_chain = p->next;
371
372 current_function_decl = p->decl;
373 reg_renumber = 0;
374
375 restore_tree_status (p);
376 restore_emit_status (p);
377
378 if (restore_machine_status)
379 (*restore_machine_status) (p);
380 if (restore_lang_status)
381 (*restore_lang_status) (p);
382
383 /* Finish doing put_var_into_stack for any of our variables
384 which became addressable during the nested function. */
385 for (queue = p->fixup_var_refs_queue; queue; queue = next)
386 {
387 next = queue->next;
388 fixup_var_refs (queue->modified, queue->promoted_mode,
389 queue->unsignedp, 0);
390 free (queue);
391 }
392 p->fixup_var_refs_queue = 0;
393
394 /* Reset variables that have known state during rtx generation. */
395 rtx_equal_function_value_matters = 1;
396 virtuals_instantiated = 0;
397 }
398
399 void
400 pop_function_context ()
401 {
402 pop_function_context_from (current_function_decl);
403 }
404
405 /* Clear out all parts of the state in F that can safely be discarded
406 after the function has been parsed, but not compiled, to let
407 garbage collection reclaim the memory. */
408
409 void
410 free_after_parsing (f)
411 struct function *f;
412 {
413 /* f->expr->forced_labels is used by code generation. */
414 /* f->emit->regno_reg_rtx is used by code generation. */
415 /* f->varasm is used by code generation. */
416 /* f->eh->eh_return_stub_label is used by code generation. */
417
418 if (free_lang_status)
419 (*free_lang_status) (f);
420 free_stmt_status (f);
421 }
422
423 /* Clear out all parts of the state in F that can safely be discarded
424 after the function has been compiled, to let garbage collection
425 reclaim the memory. */
426
427 void
428 free_after_compilation (f)
429 struct function *f;
430 {
431 free_eh_status (f);
432 free_expr_status (f);
433 free_emit_status (f);
434 free_varasm_status (f);
435
436 if (free_machine_status)
437 (*free_machine_status) (f);
438
439 if (f->x_parm_reg_stack_loc)
440 free (f->x_parm_reg_stack_loc);
441
442 f->arg_offset_rtx = NULL;
443 f->return_rtx = NULL;
444 f->internal_arg_pointer = NULL;
445 f->x_nonlocal_labels = NULL;
446 f->x_nonlocal_goto_handler_slots = NULL;
447 f->x_nonlocal_goto_handler_labels = NULL;
448 f->x_nonlocal_goto_stack_level = NULL;
449 f->x_cleanup_label = NULL;
450 f->x_return_label = NULL;
451 f->x_save_expr_regs = NULL;
452 f->x_stack_slot_list = NULL;
453 f->x_rtl_expr_chain = NULL;
454 f->x_tail_recursion_label = NULL;
455 f->x_tail_recursion_reentry = NULL;
456 f->x_arg_pointer_save_area = NULL;
457 f->x_context_display = NULL;
458 f->x_trampoline_list = NULL;
459 f->x_parm_birth_insn = NULL;
460 f->x_last_parm_insn = NULL;
461 f->x_parm_reg_stack_loc = NULL;
462 f->x_temp_slots = NULL;
463 f->fixup_var_refs_queue = NULL;
464 f->original_arg_vector = NULL;
465 f->original_decl_initial = NULL;
466 f->inl_last_parm_insn = NULL;
467 f->epilogue_delay_list = NULL;
468 }
469
470 \f
471 /* Allocate fixed slots in the stack frame of the current function. */
472
473 /* Return size needed for stack frame based on slots so far allocated in
474 function F.
475 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
476 the caller may have to do that. */
477
478 HOST_WIDE_INT
479 get_func_frame_size (f)
480 struct function *f;
481 {
482 #ifdef FRAME_GROWS_DOWNWARD
483 return -f->x_frame_offset;
484 #else
485 return f->x_frame_offset;
486 #endif
487 }
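/* For instance (illustrative offsets): on a target where the frame grows
   downward, an x_frame_offset of -48 reports a frame size of 48; where the
   frame grows upward, an x_frame_offset of 48 reports 48 directly.  */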
488
489 /* Return size needed for stack frame based on slots so far allocated.
490 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
491 the caller may have to do that. */
492 HOST_WIDE_INT
493 get_frame_size ()
494 {
495 return get_func_frame_size (cfun);
496 }
497
498 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
499 with machine mode MODE.
500
501 ALIGN controls the amount of alignment for the address of the slot:
502 0 means according to MODE,
503 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
504 positive specifies alignment boundary in bits.
505
506 We do not round to stack_boundary here.
507
508 FUNCTION specifies the function to allocate in. */
509
510 static rtx
511 assign_stack_local_1 (mode, size, align, function)
512 enum machine_mode mode;
513 HOST_WIDE_INT size;
514 int align;
515 struct function *function;
516 {
517 register rtx x, addr;
518 int bigend_correction = 0;
519 int alignment;
520
521 /* Allocate in the memory associated with the function in whose frame
522 we are assigning. */
523 if (function != cfun)
524 push_obstacks (function->function_obstack,
525 function->function_maybepermanent_obstack);
526
527 if (align == 0)
528 {
529 tree type;
530
531 alignment = GET_MODE_ALIGNMENT (mode);
532 if (mode == BLKmode)
533 alignment = BIGGEST_ALIGNMENT;
534
535 /* Allow the target to (possibly) increase the alignment of this
536 stack slot. */
537 type = type_for_mode (mode, 0);
538 if (type)
539 alignment = LOCAL_ALIGNMENT (type, alignment);
540
541 alignment /= BITS_PER_UNIT;
542 }
543 else if (align == -1)
544 {
545 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
546 size = CEIL_ROUND (size, alignment);
547 }
548 else
549 alignment = align / BITS_PER_UNIT;
550
551 #ifdef FRAME_GROWS_DOWNWARD
552 function->x_frame_offset -= size;
553 #endif
554
555 /* Ignore any alignment request beyond what the preferred stack boundary can provide. */
556 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
557 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
558
559 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
560 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
561
562 /* Round frame offset to that alignment.
563 We must be careful here, since FRAME_OFFSET might be negative and
564 division with a negative dividend isn't as well defined as we might
565 like. So we instead assume that ALIGNMENT is a power of two and
566 use logical operations which are unambiguous. */
567 #ifdef FRAME_GROWS_DOWNWARD
568 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
569 #else
570 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
571 #endif
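  /* For instance (illustrative values): with FRAME_GROWS_DOWNWARD, a 4-byte
     request that leaves x_frame_offset at -12 with an 8-byte alignment gives
     FLOOR_ROUND (-12, 8) == -16, so the slot starts at offset -16 and the
     4 bytes of padding sit above it.  */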
572
573 /* On a big-endian machine, if we are allocating more space than we will use,
574 use the least significant bytes of those that are allocated. */
575 if (BYTES_BIG_ENDIAN && mode != BLKmode)
576 bigend_correction = size - GET_MODE_SIZE (mode);
577
578 /* If we have already instantiated virtual registers, return the actual
579 address relative to the frame pointer. */
580 if (function == cfun && virtuals_instantiated)
581 addr = plus_constant (frame_pointer_rtx,
582 (frame_offset + bigend_correction
583 + STARTING_FRAME_OFFSET));
584 else
585 addr = plus_constant (virtual_stack_vars_rtx,
586 function->x_frame_offset + bigend_correction);
587
588 #ifndef FRAME_GROWS_DOWNWARD
589 function->x_frame_offset += size;
590 #endif
591
592 x = gen_rtx_MEM (mode, addr);
593
594 function->x_stack_slot_list
595 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
596
597 if (function != cfun)
598 pop_obstacks ();
599
600 return x;
601 }
602
603 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
604 current function. */
605 rtx
606 assign_stack_local (mode, size, align)
607 enum machine_mode mode;
608 HOST_WIDE_INT size;
609 int align;
610 {
611 return assign_stack_local_1 (mode, size, align, cfun);
612 }
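/* An illustrative use of the wrapper above (hypothetical caller; the mode
   and size are only examples):

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   An ALIGN of 0 requests the natural alignment of SImode, and the returned
   MEM addresses a freshly allocated slot in the current function's frame.  */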
613 \f
614 /* Allocate a temporary stack slot and record it for possible later
615 reuse.
616
617 MODE is the machine mode to be given to the returned rtx.
618
619 SIZE is the size in units of the space required. We do no rounding here
620 since assign_stack_local will do any required rounding.
621
622 KEEP is 1 if this slot is to be retained after a call to
623 free_temp_slots. Automatic variables for a block are allocated
624 with this flag. KEEP is 2 if we allocate a longer term temporary,
625 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
626 if we are to allocate something at an inner level to be treated as
627 a variable in the block (e.g., a SAVE_EXPR).
628
629 TYPE is the type that will be used for the stack slot. */
630
631 static rtx
632 assign_stack_temp_for_type (mode, size, keep, type)
633 enum machine_mode mode;
634 HOST_WIDE_INT size;
635 int keep;
636 tree type;
637 {
638 int align;
639 int alias_set;
640 struct temp_slot *p, *best_p = 0;
641
642 /* If SIZE is -1 it means that somebody tried to allocate a temporary
643 of a variable size. */
644 if (size == -1)
645 abort ();
646
647 /* If we know the alias set for the memory that will be used, use
648 it. If there's no TYPE, then we don't know anything about the
649 alias set for the memory. */
650 if (type)
651 alias_set = get_alias_set (type);
652 else
653 alias_set = 0;
654
655 align = GET_MODE_ALIGNMENT (mode);
656 if (mode == BLKmode)
657 align = BIGGEST_ALIGNMENT;
658
659 if (! type)
660 type = type_for_mode (mode, 0);
661 if (type)
662 align = LOCAL_ALIGNMENT (type, align);
663
664 /* Try to find an available, already-allocated temporary of the proper
665 mode which meets the size and alignment requirements. Choose the
666 smallest one with the closest alignment. */
667 for (p = temp_slots; p; p = p->next)
668 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
669 && ! p->in_use
670 && (!flag_strict_aliasing
671 || (alias_set && p->alias_set == alias_set))
672 && (best_p == 0 || best_p->size > p->size
673 || (best_p->size == p->size && best_p->align > p->align)))
674 {
675 if (p->align == align && p->size == size)
676 {
677 best_p = 0;
678 break;
679 }
680 best_p = p;
681 }
682
683 /* Make our best, if any, the one to use. */
684 if (best_p)
685 {
686 /* If there are enough aligned bytes left over, make them into a new
687 temp_slot so that the extra bytes don't get wasted. Do this only
688 for BLKmode slots, so that we can be sure of the alignment. */
689 if (GET_MODE (best_p->slot) == BLKmode
690 /* We can't split slots if -fstrict-aliasing because the
691 information about the alias set for the new slot will be
692 lost. */
693 && !flag_strict_aliasing)
694 {
695 int alignment = best_p->align / BITS_PER_UNIT;
696 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
697
698 if (best_p->size - rounded_size >= alignment)
699 {
700 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
701 p->in_use = p->addr_taken = 0;
702 p->size = best_p->size - rounded_size;
703 p->base_offset = best_p->base_offset + rounded_size;
704 p->full_size = best_p->full_size - rounded_size;
705 p->slot = gen_rtx_MEM (BLKmode,
706 plus_constant (XEXP (best_p->slot, 0),
707 rounded_size));
708 p->align = best_p->align;
709 p->address = 0;
710 p->rtl_expr = 0;
711 p->next = temp_slots;
712 temp_slots = p;
713
714 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
715 stack_slot_list);
716
717 best_p->size = rounded_size;
718 best_p->full_size = rounded_size;
719 }
720 }
721
722 p = best_p;
723 }
724
725 /* If we still didn't find one, make a new temporary. */
726 if (p == 0)
727 {
728 HOST_WIDE_INT frame_offset_old = frame_offset;
729
730 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
731
732 /* We are passing an explicit alignment request to assign_stack_local.
733 One side effect of that is assign_stack_local will not round SIZE
734 to ensure the frame offset remains suitably aligned.
735
736 So for requests which depended on the rounding of SIZE, we go ahead
737 and round it now. We also make sure ALIGNMENT is at least
738 BIGGEST_ALIGNMENT. */
739 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
740 abort();
741 p->slot = assign_stack_local (mode,
742 (mode == BLKmode
743 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
744 : size),
745 align);
746
747 p->align = align;
748 p->alias_set = alias_set;
749
750 /* The following slot size computation is necessary because we don't
751 know the actual size of the temporary slot until assign_stack_local
752 has performed all the frame alignment and size rounding for the
753 requested temporary. Note that extra space added for alignment
754 can be either above or below this stack slot depending on which
755 way the frame grows. We include the extra space if and only if it
756 is above this slot. */
757 #ifdef FRAME_GROWS_DOWNWARD
758 p->size = frame_offset_old - frame_offset;
759 #else
760 p->size = size;
761 #endif
762
763 /* Now define the fields used by combine_temp_slots. */
764 #ifdef FRAME_GROWS_DOWNWARD
765 p->base_offset = frame_offset;
766 p->full_size = frame_offset_old - frame_offset;
767 #else
768 p->base_offset = frame_offset_old;
769 p->full_size = frame_offset - frame_offset_old;
770 #endif
771 p->address = 0;
772 p->next = temp_slots;
773 temp_slots = p;
774 }
775
776 p->in_use = 1;
777 p->addr_taken = 0;
778 p->rtl_expr = seq_rtl_expr;
779
780 if (keep == 2)
781 {
782 p->level = target_temp_slot_level;
783 p->keep = 0;
784 }
785 else if (keep == 3)
786 {
787 p->level = var_temp_slot_level;
788 p->keep = 0;
789 }
790 else
791 {
792 p->level = temp_slot_level;
793 p->keep = keep;
794 }
795
796 /* We may be reusing an old slot, so clear any MEM flags that may have been
797 set from before. */
798 RTX_UNCHANGING_P (p->slot) = 0;
799 MEM_IN_STRUCT_P (p->slot) = 0;
800 MEM_SCALAR_P (p->slot) = 0;
801 MEM_ALIAS_SET (p->slot) = 0;
802 return p->slot;
803 }
804
805 /* Allocate a temporary stack slot and record it for possible later
806 reuse. The first three arguments are the same as in the preceding function. */
807
808 rtx
809 assign_stack_temp (mode, size, keep)
810 enum machine_mode mode;
811 HOST_WIDE_INT size;
812 int keep;
813 {
814 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
815 }
816 \f
817 /* Assign a temporary of given TYPE.
818 KEEP is as for assign_stack_temp.
819 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
820 it is 0 if a register is OK.
821 DONT_PROMOTE is 1 if we should not promote values in register
822 to wider modes. */
823
824 rtx
825 assign_temp (type, keep, memory_required, dont_promote)
826 tree type;
827 int keep;
828 int memory_required;
829 int dont_promote ATTRIBUTE_UNUSED;
830 {
831 enum machine_mode mode = TYPE_MODE (type);
832 #ifndef PROMOTE_FOR_CALL_ONLY
833 int unsignedp = TREE_UNSIGNED (type);
834 #endif
835
836 if (mode == BLKmode || memory_required)
837 {
838 HOST_WIDE_INT size = int_size_in_bytes (type);
839 rtx tmp;
840
841 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
842 problems with allocating the stack space. */
843 if (size == 0)
844 size = 1;
845
846 /* Unfortunately, we don't yet know how to allocate variable-sized
847 temporaries. However, sometimes we have a fixed upper limit on
848 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
849 instead. This is the case for Chill variable-sized strings. */
850 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
851 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
852 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
853 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
854
855 tmp = assign_stack_temp_for_type (mode, size, keep, type);
856 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
857 return tmp;
858 }
859
860 #ifndef PROMOTE_FOR_CALL_ONLY
861 if (! dont_promote)
862 mode = promote_mode (type, mode, &unsignedp, 0);
863 #endif
864
865 return gen_reg_rtx (mode);
866 }
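/* A sketch of a typical call (hypothetical values): to force a value into
   addressable stack memory one might write

     rtx target = assign_temp (type, 0, 1, 1);

   i.e. KEEP 0, MEMORY_REQUIRED 1 so a stack slot is returned even when a
   register would do, and DONT_PROMOTE 1 (which only matters when a register
   is returned instead).  */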
867 \f
868 /* Combine temporary stack slots which are adjacent on the stack.
869
870 This allows for better use of already allocated stack space. This is only
871 done for BLKmode slots because we can be sure that we won't have alignment
872 problems in this case. */
873
874 void
875 combine_temp_slots ()
876 {
877 struct temp_slot *p, *q;
878 struct temp_slot *prev_p, *prev_q;
879 int num_slots;
880
881 /* We can't combine slots, because the information about which slot
882 is in which alias set will be lost. */
883 if (flag_strict_aliasing)
884 return;
885
886 /* If there are a lot of temp slots, don't do anything unless high
887 levels of optimization are in use. */
888 if (! flag_expensive_optimizations)
889 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
890 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
891 return;
892
893 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
894 {
895 int delete_p = 0;
896
897 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
898 for (q = p->next, prev_q = p; q; q = prev_q->next)
899 {
900 int delete_q = 0;
901 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
902 {
903 if (p->base_offset + p->full_size == q->base_offset)
904 {
905 /* Q comes after P; combine Q into P. */
906 p->size += q->size;
907 p->full_size += q->full_size;
908 delete_q = 1;
909 }
910 else if (q->base_offset + q->full_size == p->base_offset)
911 {
912 /* P comes after Q; combine P into Q. */
913 q->size += p->size;
914 q->full_size += p->full_size;
915 delete_p = 1;
916 break;
917 }
918 }
919 /* Either delete Q or advance past it. */
920 if (delete_q)
921 prev_q->next = q->next;
922 else
923 prev_q = q;
924 }
925 /* Either delete P or advance past it. */
926 if (delete_p)
927 {
928 if (prev_p)
929 prev_p->next = p->next;
930 else
931 temp_slots = p->next;
932 }
933 else
934 prev_p = p;
935 }
936 }
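/* A worked example (illustrative offsets): two free BLKmode slots recorded
   as { base_offset 32, full_size 16 } and { base_offset 48, full_size 8 }
   satisfy p->base_offset + p->full_size == q->base_offset, so they merge
   into a single free slot with base_offset 32 and full_size 24.  */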
937 \f
938 /* Find the temp slot corresponding to the object at address X. */
939
940 static struct temp_slot *
941 find_temp_slot_from_address (x)
942 rtx x;
943 {
944 struct temp_slot *p;
945 rtx next;
946
947 for (p = temp_slots; p; p = p->next)
948 {
949 if (! p->in_use)
950 continue;
951
952 else if (XEXP (p->slot, 0) == x
953 || p->address == x
954 || (GET_CODE (x) == PLUS
955 && XEXP (x, 0) == virtual_stack_vars_rtx
956 && GET_CODE (XEXP (x, 1)) == CONST_INT
957 && INTVAL (XEXP (x, 1)) >= p->base_offset
958 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
959 return p;
960
961 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
962 for (next = p->address; next; next = XEXP (next, 1))
963 if (XEXP (next, 0) == x)
964 return p;
965 }
966
967 /* If we have a sum involving a register, see if it points to a temp
968 slot. */
969 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
970 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
971 return p;
972 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
973 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
974 return p;
975
976 return 0;
977 }
978
979 /* Indicate that NEW is an alternate way of referring to the temp slot
980 that previously was known by OLD. */
981
982 void
983 update_temp_slot_address (old, new)
984 rtx old, new;
985 {
986 struct temp_slot *p;
987
988 if (rtx_equal_p (old, new))
989 return;
990
991 p = find_temp_slot_from_address (old);
992
993 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
994 is a register, see if one operand of the PLUS is a temporary
995 location. If so, NEW points into it. Otherwise, if both OLD and
996 NEW are a PLUS and there is a register in common between them,
997 try a recursive call on those values. */
998 if (p == 0)
999 {
1000 if (GET_CODE (old) != PLUS)
1001 return;
1002
1003 if (GET_CODE (new) == REG)
1004 {
1005 update_temp_slot_address (XEXP (old, 0), new);
1006 update_temp_slot_address (XEXP (old, 1), new);
1007 return;
1008 }
1009 else if (GET_CODE (new) != PLUS)
1010 return;
1011
1012 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1013 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1014 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1015 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1016 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1017 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1018 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1019 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1020
1021 return;
1022 }
1023
1024 /* Otherwise add an alias for the temp's address. */
1025 else if (p->address == 0)
1026 p->address = new;
1027 else
1028 {
1029 if (GET_CODE (p->address) != EXPR_LIST)
1030 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1031
1032 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1033 }
1034 }
1035
1036 /* If X could be a reference to a temporary slot, mark the fact that its
1037 address was taken. */
1038
1039 void
1040 mark_temp_addr_taken (x)
1041 rtx x;
1042 {
1043 struct temp_slot *p;
1044
1045 if (x == 0)
1046 return;
1047
1048 /* If X is not in memory or is at a constant address, it cannot be in
1049 a temporary slot. */
1050 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1051 return;
1052
1053 p = find_temp_slot_from_address (XEXP (x, 0));
1054 if (p != 0)
1055 p->addr_taken = 1;
1056 }
1057
1058 /* If X could be a reference to a temporary slot, mark that slot as
1059 belonging to one level higher than the current level. If X
1060 matched one of our slots, just mark that one. Otherwise, we can't
1061 easily predict which it is, so upgrade all of them. Kept slots
1062 need not be touched.
1063
1064 This is called when an ({...}) construct occurs and a statement
1065 returns a value in memory. */
1066
1067 void
1068 preserve_temp_slots (x)
1069 rtx x;
1070 {
1071 struct temp_slot *p = 0;
1072
1073 /* If there is no result, we still might have some objects whose addresses
1074 were taken, so we need to make sure they stay around. */
1075 if (x == 0)
1076 {
1077 for (p = temp_slots; p; p = p->next)
1078 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1079 p->level--;
1080
1081 return;
1082 }
1083
1084 /* If X is a register that is being used as a pointer, see if we have
1085 a temporary slot we know it points to. To be consistent with
1086 the code below, we really should preserve all non-kept slots
1087 if we can't find a match, but that seems to be much too costly. */
1088 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1089 p = find_temp_slot_from_address (x);
1090
1091 /* If X is not in memory or is at a constant address, it cannot be in
1092 a temporary slot, but it can contain something whose address was
1093 taken. */
1094 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1095 {
1096 for (p = temp_slots; p; p = p->next)
1097 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1098 p->level--;
1099
1100 return;
1101 }
1102
1103 /* First see if we can find a match. */
1104 if (p == 0)
1105 p = find_temp_slot_from_address (XEXP (x, 0));
1106
1107 if (p != 0)
1108 {
1109 /* Move everything at our level whose address was taken to our new
1110 level in case we used its address. */
1111 struct temp_slot *q;
1112
1113 if (p->level == temp_slot_level)
1114 {
1115 for (q = temp_slots; q; q = q->next)
1116 if (q != p && q->addr_taken && q->level == p->level)
1117 q->level--;
1118
1119 p->level--;
1120 p->addr_taken = 0;
1121 }
1122 return;
1123 }
1124
1125 /* Otherwise, preserve all non-kept slots at this level. */
1126 for (p = temp_slots; p; p = p->next)
1127 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1128 p->level--;
1129 }
1130
1131 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1132 with that RTL_EXPR, promote it into a temporary slot at the present
1133 level so it will not be freed when we free slots made in the
1134 RTL_EXPR. */
1135
1136 void
1137 preserve_rtl_expr_result (x)
1138 rtx x;
1139 {
1140 struct temp_slot *p;
1141
1142 /* If X is not in memory or is at a constant address, it cannot be in
1143 a temporary slot. */
1144 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1145 return;
1146
1147 /* If we can find a match, move it to our level unless it is already at
1148 an upper level. */
1149 p = find_temp_slot_from_address (XEXP (x, 0));
1150 if (p != 0)
1151 {
1152 p->level = MIN (p->level, temp_slot_level);
1153 p->rtl_expr = 0;
1154 }
1155
1156 return;
1157 }
1158
1159 /* Free all temporaries used so far. This is normally called at the end
1160 of generating code for a statement. Don't free any temporaries
1161 currently in use for an RTL_EXPR that hasn't yet been emitted.
1162 We could eventually do better than this, since such a temporary can be
1163 reused while generating the same RTL_EXPR, but this is complex and probably not
1164 worthwhile. */
1165
1166 void
1167 free_temp_slots ()
1168 {
1169 struct temp_slot *p;
1170
1171 for (p = temp_slots; p; p = p->next)
1172 if (p->in_use && p->level == temp_slot_level && ! p->keep
1173 && p->rtl_expr == 0)
1174 p->in_use = 0;
1175
1176 combine_temp_slots ();
1177 }
1178
1179 /* Free all temporary slots used in T, an RTL_EXPR node. */
1180
1181 void
1182 free_temps_for_rtl_expr (t)
1183 tree t;
1184 {
1185 struct temp_slot *p;
1186
1187 for (p = temp_slots; p; p = p->next)
1188 if (p->rtl_expr == t)
1189 p->in_use = 0;
1190
1191 combine_temp_slots ();
1192 }
1193
1194 /* Mark all temporaries ever allocated in this function as not suitable
1195 for reuse until the current level is exited. */
1196
1197 void
1198 mark_all_temps_used ()
1199 {
1200 struct temp_slot *p;
1201
1202 for (p = temp_slots; p; p = p->next)
1203 {
1204 p->in_use = p->keep = 1;
1205 p->level = MIN (p->level, temp_slot_level);
1206 }
1207 }
1208
1209 /* Push deeper into the nesting level for stack temporaries. */
1210
1211 void
1212 push_temp_slots ()
1213 {
1214 temp_slot_level++;
1215 }
1216
1217 /* Likewise, but save the new level as the place to allocate variables
1218 for blocks. */
1219
1220 #if 0
1221 void
1222 push_temp_slots_for_block ()
1223 {
1224 push_temp_slots ();
1225
1226 var_temp_slot_level = temp_slot_level;
1227 }
1228
1229 /* Likewise, but save the new level as the place to allocate temporaries
1230 for TARGET_EXPRs. */
1231
1232 void
1233 push_temp_slots_for_target ()
1234 {
1235 push_temp_slots ();
1236
1237 target_temp_slot_level = temp_slot_level;
1238 }
1239
1240 /* Set and get the value of target_temp_slot_level. The only
1241 permitted use of these functions is to save and restore this value. */
1242
1243 int
1244 get_target_temp_slot_level ()
1245 {
1246 return target_temp_slot_level;
1247 }
1248
1249 void
1250 set_target_temp_slot_level (level)
1251 int level;
1252 {
1253 target_temp_slot_level = level;
1254 }
1255 #endif
1256
1257 /* Pop a temporary nesting level. All slots in use in the current level
1258 are freed. */
1259
1260 void
1261 pop_temp_slots ()
1262 {
1263 struct temp_slot *p;
1264
1265 for (p = temp_slots; p; p = p->next)
1266 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1267 p->in_use = 0;
1268
1269 combine_temp_slots ();
1270
1271 temp_slot_level--;
1272 }
1273
1274 /* Initialize temporary slots. */
1275
1276 void
1277 init_temp_slots ()
1278 {
1279 /* We have not allocated any temporaries yet. */
1280 temp_slots = 0;
1281 temp_slot_level = 0;
1282 var_temp_slot_level = 0;
1283 target_temp_slot_level = 0;
1284 }
1285 \f
1286 /* Retroactively move an auto variable from a register to a stack slot.
1287 This is done when an address-reference to the variable is seen. */
1288
1289 void
1290 put_var_into_stack (decl)
1291 tree decl;
1292 {
1293 register rtx reg;
1294 enum machine_mode promoted_mode, decl_mode;
1295 struct function *function = 0;
1296 tree context;
1297 int can_use_addressof;
1298
1299 context = decl_function_context (decl);
1300
1301 /* Get the current rtl used for this object and its original mode. */
1302 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1303
1304 /* No need to do anything if decl has no rtx yet
1305 since in that case caller is setting TREE_ADDRESSABLE
1306 and a stack slot will be assigned when the rtl is made. */
1307 if (reg == 0)
1308 return;
1309
1310 /* Get the declared mode for this object. */
1311 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1312 : DECL_MODE (decl));
1313 /* Get the mode it's actually stored in. */
1314 promoted_mode = GET_MODE (reg);
1315
1316 /* If this variable comes from an outer function,
1317 find that function's saved context. */
1318 if (context != current_function_decl && context != inline_function_decl)
1319 for (function = outer_function_chain; function; function = function->next)
1320 if (function->decl == context)
1321 break;
1322
1323 /* If this is a variable-size object with a pseudo to address it,
1324 put that pseudo into the stack, if the var is nonlocal. */
1325 if (DECL_NONLOCAL (decl)
1326 && GET_CODE (reg) == MEM
1327 && GET_CODE (XEXP (reg, 0)) == REG
1328 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1329 {
1330 reg = XEXP (reg, 0);
1331 decl_mode = promoted_mode = GET_MODE (reg);
1332 }
1333
1334 can_use_addressof
1335 = (function == 0
1336 && optimize > 0
1337 /* FIXME make it work for promoted modes too */
1338 && decl_mode == promoted_mode
1339 #ifdef NON_SAVING_SETJMP
1340 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1341 #endif
1342 );
1343
1344 /* If we can't use ADDRESSOF, make sure we see through one we already
1345 generated. */
1346 if (! can_use_addressof && GET_CODE (reg) == MEM
1347 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1348 reg = XEXP (XEXP (reg, 0), 0);
1349
1350 /* Now we should have a value that resides in one or more pseudo regs. */
1351
1352 if (GET_CODE (reg) == REG)
1353 {
1354 /* If this variable lives in the current function and we don't need
1355 to put things in the stack for the sake of setjmp, try to keep it
1356 in a register until we know we actually need the address. */
1357 if (can_use_addressof)
1358 gen_mem_addressof (reg, decl);
1359 else
1360 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1361 promoted_mode, decl_mode,
1362 TREE_SIDE_EFFECTS (decl), 0,
1363 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1364 0);
1365 }
1366 else if (GET_CODE (reg) == CONCAT)
1367 {
1368 /* A CONCAT contains two pseudos; put them both in the stack.
1369 We do it so they end up consecutive. */
1370 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1371 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1372 #ifdef FRAME_GROWS_DOWNWARD
1373 /* Since part 0 should have a lower address, do it second. */
1374 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1375 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1376 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1377 0);
1378 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1379 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1380 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1381 0);
1382 #else
1383 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1384 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1385 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1386 0);
1387 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1388 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1389 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1390 0);
1391 #endif
1392
1393 /* Change the CONCAT into a combined MEM for both parts. */
1394 PUT_CODE (reg, MEM);
1395 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1396 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1397
1398 /* The two parts are in memory order already.
1399 Use the lower part's address as ours. */
1400 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1401 /* Prevent sharing of rtl that might lose. */
1402 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1403 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1404 }
1405 else
1406 return;
1407
1408 if (current_function_check_memory_usage)
1409 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1410 XEXP (reg, 0), Pmode,
1411 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1412 TYPE_MODE (sizetype),
1413 GEN_INT (MEMORY_USE_RW),
1414 TYPE_MODE (integer_type_node));
1415 }
1416
1417 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1418 into the stack frame of FUNCTION (0 means the current function).
1419 DECL_MODE is the machine mode of the user-level data type.
1420 PROMOTED_MODE is the machine mode of the register.
1421 VOLATILE_P is nonzero if this is for a "volatile" decl.
1422 USED_P is nonzero if this reg might have already been used in an insn. */
1423
1424 static void
1425 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1426 original_regno, used_p, ht)
1427 struct function *function;
1428 rtx reg;
1429 tree type;
1430 enum machine_mode promoted_mode, decl_mode;
1431 int volatile_p;
1432 int original_regno;
1433 int used_p;
1434 struct hash_table *ht;
1435 {
1436 struct function *func = function ? function : cfun;
1437 rtx new = 0;
1438 int regno = original_regno;
1439
1440 if (regno == 0)
1441 regno = REGNO (reg);
1442
1443 if (regno < func->x_max_parm_reg)
1444 new = func->x_parm_reg_stack_loc[regno];
1445 if (new == 0)
1446 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1447
1448 PUT_CODE (reg, MEM);
1449 PUT_MODE (reg, decl_mode);
1450 XEXP (reg, 0) = XEXP (new, 0);
1451 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1452 MEM_VOLATILE_P (reg) = volatile_p;
1453
1454 /* If this is a memory ref that contains aggregate components,
1455 mark it as such for cse and loop optimize. If we are reusing a
1456 previously generated stack slot, then we need to copy the bit in
1457 case it was set for other reasons. For instance, it is set for
1458 __builtin_va_alist. */
1459 MEM_SET_IN_STRUCT_P (reg,
1460 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1461 MEM_ALIAS_SET (reg) = get_alias_set (type);
1462
1463 /* Now make sure that all refs to the variable, previously made
1464 when it was a register, are fixed up to be valid again. */
1465
1466 if (used_p && function != 0)
1467 {
1468 struct var_refs_queue *temp;
1469
1470 temp
1471 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1472 temp->modified = reg;
1473 temp->promoted_mode = promoted_mode;
1474 temp->unsignedp = TREE_UNSIGNED (type);
1475 temp->next = function->fixup_var_refs_queue;
1476 function->fixup_var_refs_queue = temp;
1477 }
1478 else if (used_p)
1479 /* Variable is local; fix it up now. */
1480 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1481 }
1482 \f
1483 static void
1484 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1485 rtx var;
1486 enum machine_mode promoted_mode;
1487 int unsignedp;
1488 struct hash_table *ht;
1489 {
1490 tree pending;
1491 rtx first_insn = get_insns ();
1492 struct sequence_stack *stack = seq_stack;
1493 tree rtl_exps = rtl_expr_chain;
1494
1495 /* Must scan all insns for stack-refs that exceed the limit. */
1496 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1497 stack == 0, ht);
1498 /* If there's a hash table, it must record all uses of VAR. */
1499 if (ht)
1500 return;
1501
1502 /* Scan all pending sequences too. */
1503 for (; stack; stack = stack->next)
1504 {
1505 push_to_sequence (stack->first);
1506 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1507 stack->first, stack->next != 0, 0);
1508 /* Update remembered end of sequence
1509 in case we added an insn at the end. */
1510 stack->last = get_last_insn ();
1511 end_sequence ();
1512 }
1513
1514 /* Scan all waiting RTL_EXPRs too. */
1515 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1516 {
1517 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1518 if (seq != const0_rtx && seq != 0)
1519 {
1520 push_to_sequence (seq);
1521 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1522 0);
1523 end_sequence ();
1524 }
1525 }
1526
1527 /* Scan the catch clauses for exception handling too. */
1528 push_to_sequence (catch_clauses);
1529 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1530 0, 0);
1531 end_sequence ();
1532 }
1533 \f
1534 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries, and X
1535 is some part of an insn. Return a struct fixup_replacement whose OLD
1536 value is equal to X. Allocate a new structure if no such entry exists. */
1537
1538 static struct fixup_replacement *
1539 find_fixup_replacement (replacements, x)
1540 struct fixup_replacement **replacements;
1541 rtx x;
1542 {
1543 struct fixup_replacement *p;
1544
1545 /* See if we have already replaced this. */
1546 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1547 ;
1548
1549 if (p == 0)
1550 {
1551 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1552 p->old = x;
1553 p->new = 0;
1554 p->next = *replacements;
1555 *replacements = p;
1556 }
1557
1558 return p;
1559 }
1560
1561 /* Scan the insn-chain starting with INSN for refs to VAR
1562 and fix them up. TOPLEVEL is nonzero if this chain is the
1563 main chain of insns for the current function. */
1564
1565 static void
1566 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1567 rtx var;
1568 enum machine_mode promoted_mode;
1569 int unsignedp;
1570 rtx insn;
1571 int toplevel;
1572 struct hash_table *ht;
1573 {
1574 rtx call_dest = 0;
1575 rtx insn_list = NULL_RTX;
1576
1577 /* If we already know which INSNs reference VAR there's no need
1578 to walk the entire instruction chain. */
1579 if (ht)
1580 {
1581 insn_list = ((struct insns_for_mem_entry *)
1582 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1583 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1584 insn_list = insn_list ? XEXP (insn_list, 1) : NULL_RTX;
1585 }
1586
1587 while (insn)
1588 {
1589 rtx next = NEXT_INSN (insn);
1590 rtx set, prev, prev_set;
1591 rtx note;
1592
1593 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1594 {
1595 /* Remember the notes in case we delete the insn. */
1596 note = REG_NOTES (insn);
1597
1598 /* If this is a CLOBBER of VAR, delete it.
1599
1600 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1601 and REG_RETVAL notes too. */
1602 if (GET_CODE (PATTERN (insn)) == CLOBBER
1603 && (XEXP (PATTERN (insn), 0) == var
1604 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1605 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1606 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1607 {
1608 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1609 /* The REG_LIBCALL note will go away since we are going to
1610 turn INSN into a NOTE, so just delete the
1611 corresponding REG_RETVAL note. */
1612 remove_note (XEXP (note, 0),
1613 find_reg_note (XEXP (note, 0), REG_RETVAL,
1614 NULL_RTX));
1615
1616 /* In unoptimized compilation, we shouldn't call delete_insn
1617 except in jump.c doing warnings. */
1618 PUT_CODE (insn, NOTE);
1619 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1620 NOTE_SOURCE_FILE (insn) = 0;
1621 }
1622
1623 /* The insn to load VAR from a home in the arglist
1624 is now a no-op. When we see it, just delete it.
1625 Similarly if this is storing VAR from a register from which
1626 it was loaded in the previous insn. This will occur
1627 when an ADDRESSOF was made for an arglist slot. */
1628 else if (toplevel
1629 && (set = single_set (insn)) != 0
1630 && SET_DEST (set) == var
1631 /* If this represents the result of an insn group,
1632 don't delete the insn. */
1633 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1634 && (rtx_equal_p (SET_SRC (set), var)
1635 || (GET_CODE (SET_SRC (set)) == REG
1636 && (prev = prev_nonnote_insn (insn)) != 0
1637 && (prev_set = single_set (prev)) != 0
1638 && SET_DEST (prev_set) == SET_SRC (set)
1639 && rtx_equal_p (SET_SRC (prev_set), var))))
1640 {
1641 /* In unoptimized compilation, we shouldn't call delete_insn
1642 except in jump.c doing warnings. */
1643 PUT_CODE (insn, NOTE);
1644 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1645 NOTE_SOURCE_FILE (insn) = 0;
1646 if (insn == last_parm_insn)
1647 last_parm_insn = PREV_INSN (next);
1648 }
1649 else
1650 {
1651 struct fixup_replacement *replacements = 0;
1652 rtx next_insn = NEXT_INSN (insn);
1653
1654 if (SMALL_REGISTER_CLASSES)
1655 {
1656 /* If the insn that copies the results of a CALL_INSN
1657 into a pseudo now references VAR, we have to use an
1658 intermediate pseudo since we want the life of the
1659 return value register to be only a single insn.
1660
1661 If we don't use an intermediate pseudo, such things as
1662 address computations to make the address of VAR valid
1663 if it is not can be placed between the CALL_INSN and INSN.
1664
1665 To make sure this doesn't happen, we record the destination
1666 of the CALL_INSN and see if the next insn uses both that
1667 and VAR. */
1668
1669 if (call_dest != 0 && GET_CODE (insn) == INSN
1670 && reg_mentioned_p (var, PATTERN (insn))
1671 && reg_mentioned_p (call_dest, PATTERN (insn)))
1672 {
1673 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1674
1675 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1676
1677 PATTERN (insn) = replace_rtx (PATTERN (insn),
1678 call_dest, temp);
1679 }
1680
1681 if (GET_CODE (insn) == CALL_INSN
1682 && GET_CODE (PATTERN (insn)) == SET)
1683 call_dest = SET_DEST (PATTERN (insn));
1684 else if (GET_CODE (insn) == CALL_INSN
1685 && GET_CODE (PATTERN (insn)) == PARALLEL
1686 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1687 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1688 else
1689 call_dest = 0;
1690 }
1691
1692 /* See if we have to do anything to INSN now that VAR is in
1693 memory. If it needs to be loaded into a pseudo, use a single
1694 pseudo for the entire insn in case there is a MATCH_DUP
1695 between two operands. We pass a pointer to the head of
1696 a list of struct fixup_replacements. If fixup_var_refs_1
1697 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1698 it will record them in this list.
1699
1700 If it allocated a pseudo for any replacement, we copy into
1701 it here. */
1702
1703 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1704 &replacements);
1705
1706 /* If this is last_parm_insn, and any instructions were output
1707 after it to fix it up, then we must set last_parm_insn to
1708 the last such instruction emitted. */
1709 if (insn == last_parm_insn)
1710 last_parm_insn = PREV_INSN (next_insn);
1711
1712 while (replacements)
1713 {
1714 if (GET_CODE (replacements->new) == REG)
1715 {
1716 rtx insert_before;
1717 rtx seq;
1718
1719 /* OLD might be a (subreg (mem)). */
1720 if (GET_CODE (replacements->old) == SUBREG)
1721 replacements->old
1722 = fixup_memory_subreg (replacements->old, insn, 0);
1723 else
1724 replacements->old
1725 = fixup_stack_1 (replacements->old, insn);
1726
1727 insert_before = insn;
1728
1729 /* If we are changing the mode, do a conversion.
1730 This might be wasteful, but combine.c will
1731 eliminate much of the waste. */
1732
1733 if (GET_MODE (replacements->new)
1734 != GET_MODE (replacements->old))
1735 {
1736 start_sequence ();
1737 convert_move (replacements->new,
1738 replacements->old, unsignedp);
1739 seq = gen_sequence ();
1740 end_sequence ();
1741 }
1742 else
1743 seq = gen_move_insn (replacements->new,
1744 replacements->old);
1745
1746 emit_insn_before (seq, insert_before);
1747 }
1748
1749 replacements = replacements->next;
1750 }
1751 }
1752
1753 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1754 But don't touch other insns referred to by reg-notes;
1755 we will get them elsewhere. */
1756 while (note)
1757 {
1758 if (GET_CODE (note) != INSN_LIST)
1759 XEXP (note, 0)
1760 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1761 note = XEXP (note, 1);
1762 }
1763 }
1764
1765 if (!ht)
1766 insn = next;
1767 else if (insn_list)
1768 {
1769 insn = XEXP (insn_list, 0);
1770 insn_list = XEXP (insn_list, 1);
1771 }
1772 else
1773 insn = NULL_RTX;
1774 }
1775 }
1776 \f
1777 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1778 See if the rtx expression at *LOC in INSN needs to be changed.
1779
1780 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1781 contain a list of original rtx's and replacements. If we find that we need
1782 to modify this insn by replacing a memory reference with a pseudo or by
1783 making a new MEM to implement a SUBREG, we consult that list to see if
1784 we have already chosen a replacement. If none has already been allocated,
1785 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1786 or the SUBREG, as appropriate, to the pseudo. */
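/* Illustrative sketch, not part of the original sources (register numbers
   and the slot name are hypothetical): suppose VAR was just moved to the
   stack and INSN is

       (set (reg:SI 117) (plus:SI (mem:SI <VAR-slot>) (const_int 1)))

   but the memory operand is not valid in that position.  fixup_var_refs_1
   then records { old = (mem:SI <VAR-slot>), new = (reg:SI 200) } on the
   REPLACEMENTS list and rewrites the operand to (reg:SI 200); the caller
   emits (set (reg:SI 200) (mem:SI <VAR-slot>)) before INSN, so INSN itself
   only ever sees the pseudo.  */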
1787
1788 static void
1789 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1790 register rtx var;
1791 enum machine_mode promoted_mode;
1792 register rtx *loc;
1793 rtx insn;
1794 struct fixup_replacement **replacements;
1795 {
1796 register int i;
1797 register rtx x = *loc;
1798 RTX_CODE code = GET_CODE (x);
1799 register const char *fmt;
1800 register rtx tem, tem1;
1801 struct fixup_replacement *replacement;
1802
1803 switch (code)
1804 {
1805 case ADDRESSOF:
1806 if (XEXP (x, 0) == var)
1807 {
1808 /* Prevent sharing of rtl that might lose. */
1809 rtx sub = copy_rtx (XEXP (var, 0));
1810
1811 if (! validate_change (insn, loc, sub, 0))
1812 {
1813 rtx y = gen_reg_rtx (GET_MODE (sub));
1814 rtx seq, new_insn;
1815
1816 /* We should be able to replace with a register or all is lost.
1817 Note that we can't use validate_change to verify this, since
1818 we don't care about replacing all dups simultaneously. */
1819 if (! validate_replace_rtx (*loc, y, insn))
1820 abort ();
1821
1822 /* Careful! First try to recognize a direct move of the
1823 value, mimicking how things are done in gen_reload wrt
1824 PLUS. Consider what happens when insn is a conditional
1825 move instruction and addsi3 clobbers flags. */
1826
1827 start_sequence ();
1828 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1829 seq = gen_sequence ();
1830 end_sequence ();
1831
1832 if (recog_memoized (new_insn) < 0)
1833 {
1834 /* That failed. Fall back on force_operand and hope. */
1835
1836 start_sequence ();
1837 force_operand (sub, y);
1838 seq = gen_sequence ();
1839 end_sequence ();
1840 }
1841
1842 #ifdef HAVE_cc0
1843 /* Don't separate setter from user. */
1844 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1845 insn = PREV_INSN (insn);
1846 #endif
1847
1848 emit_insn_before (seq, insn);
1849 }
1850 }
1851 return;
1852
1853 case MEM:
1854 if (var == x)
1855 {
1856 /* If we already have a replacement, use it. Otherwise,
1857 try to fix up this address in case it is invalid. */
1858
1859 replacement = find_fixup_replacement (replacements, var);
1860 if (replacement->new)
1861 {
1862 *loc = replacement->new;
1863 return;
1864 }
1865
1866 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1867
1868 /* Unless we are forcing memory to register or we changed the mode,
1869 we can leave things the way they are if the insn is valid. */
1870
1871 INSN_CODE (insn) = -1;
1872 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1873 && recog_memoized (insn) >= 0)
1874 return;
1875
1876 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1877 return;
1878 }
1879
1880 /* If X contains VAR, we need to unshare it here so that we update
1881 each occurrence separately. But all identical MEMs in one insn
1882 must be replaced with the same rtx because of the possibility of
1883 MATCH_DUPs. */
1884
1885 if (reg_mentioned_p (var, x))
1886 {
1887 replacement = find_fixup_replacement (replacements, x);
1888 if (replacement->new == 0)
1889 replacement->new = copy_most_rtx (x, var);
1890
1891 *loc = x = replacement->new;
1892 }
1893 break;
1894
1895 case REG:
1896 case CC0:
1897 case PC:
1898 case CONST_INT:
1899 case CONST:
1900 case SYMBOL_REF:
1901 case LABEL_REF:
1902 case CONST_DOUBLE:
1903 return;
1904
1905 case SIGN_EXTRACT:
1906 case ZERO_EXTRACT:
1907 /* Note that in some cases those types of expressions are altered
1908 by optimize_bit_field, and do not survive to get here. */
1909 if (XEXP (x, 0) == var
1910 || (GET_CODE (XEXP (x, 0)) == SUBREG
1911 && SUBREG_REG (XEXP (x, 0)) == var))
1912 {
1913 /* Get TEM as a valid MEM in the mode presently in the insn.
1914
1915 We don't worry about the possibility of MATCH_DUP here; it
1916 is highly unlikely and would be tricky to handle. */
1917
1918 tem = XEXP (x, 0);
1919 if (GET_CODE (tem) == SUBREG)
1920 {
1921 if (GET_MODE_BITSIZE (GET_MODE (tem))
1922 > GET_MODE_BITSIZE (GET_MODE (var)))
1923 {
1924 replacement = find_fixup_replacement (replacements, var);
1925 if (replacement->new == 0)
1926 replacement->new = gen_reg_rtx (GET_MODE (var));
1927 SUBREG_REG (tem) = replacement->new;
1928 }
1929 else
1930 tem = fixup_memory_subreg (tem, insn, 0);
1931 }
1932 else
1933 tem = fixup_stack_1 (tem, insn);
1934
1935 /* Unless we want to load from memory, get TEM into the proper mode
1936 for an extract from memory. This can only be done if the
1937 extract is at a constant position and length. */
1938
1939 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1940 && GET_CODE (XEXP (x, 2)) == CONST_INT
1941 && ! mode_dependent_address_p (XEXP (tem, 0))
1942 && ! MEM_VOLATILE_P (tem))
1943 {
1944 enum machine_mode wanted_mode = VOIDmode;
1945 enum machine_mode is_mode = GET_MODE (tem);
1946 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1947
1948 #ifdef HAVE_extzv
1949 if (GET_CODE (x) == ZERO_EXTRACT)
1950 {
1951 wanted_mode
1952 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1953 if (wanted_mode == VOIDmode)
1954 wanted_mode = word_mode;
1955 }
1956 #endif
1957 #ifdef HAVE_extv
1958 if (GET_CODE (x) == SIGN_EXTRACT)
1959 {
1960 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1961 if (wanted_mode == VOIDmode)
1962 wanted_mode = word_mode;
1963 }
1964 #endif
1965 /* If we have a narrower mode, we can do something. */
1966 if (wanted_mode != VOIDmode
1967 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1968 {
1969 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1970 rtx old_pos = XEXP (x, 2);
1971 rtx newmem;
1972
1973 /* If the bytes and bits are counted differently, we
1974 must adjust the offset. */
1975 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1976 offset = (GET_MODE_SIZE (is_mode)
1977 - GET_MODE_SIZE (wanted_mode) - offset);
1978
1979 pos %= GET_MODE_BITSIZE (wanted_mode);
1980
1981 newmem = gen_rtx_MEM (wanted_mode,
1982 plus_constant (XEXP (tem, 0), offset));
1983 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1984 MEM_COPY_ATTRIBUTES (newmem, tem);
1985
1986 /* Make the change and see if the insn remains valid. */
1987 INSN_CODE (insn) = -1;
1988 XEXP (x, 0) = newmem;
1989 XEXP (x, 2) = GEN_INT (pos);
1990
1991 if (recog_memoized (insn) >= 0)
1992 return;
1993
1994 /* Otherwise, restore old position. XEXP (x, 0) will be
1995 restored later. */
1996 XEXP (x, 2) = old_pos;
1997 }
1998 }
1999
2000 /* If we get here, the bitfield extract insn can't accept a memory
2001 reference. Copy the input into a register. */
2002
2003 tem1 = gen_reg_rtx (GET_MODE (tem));
2004 emit_insn_before (gen_move_insn (tem1, tem), insn);
2005 XEXP (x, 0) = tem1;
2006 return;
2007 }
2008 break;
2009
2010 case SUBREG:
2011 if (SUBREG_REG (x) == var)
2012 {
2013 /* If this is a special SUBREG made because VAR was promoted
2014 from a wider mode, replace it with VAR and call ourself
2015 recursively, this time saying that the object previously
2016 had its current mode (by virtue of the SUBREG). */
2017
2018 if (SUBREG_PROMOTED_VAR_P (x))
2019 {
2020 *loc = var;
2021 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2022 return;
2023 }
2024
2025 /* If this SUBREG makes VAR wider, it has become a paradoxical
2026 SUBREG with VAR in memory, but these aren't allowed at this
2027 stage of the compilation. So load VAR into a pseudo and take
2028 a SUBREG of that pseudo. */
2029 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2030 {
2031 replacement = find_fixup_replacement (replacements, var);
2032 if (replacement->new == 0)
2033 replacement->new = gen_reg_rtx (GET_MODE (var));
2034 SUBREG_REG (x) = replacement->new;
2035 return;
2036 }
2037
2038 /* See if we have already found a replacement for this SUBREG.
2039 If so, use it. Otherwise, make a MEM and see if the insn
2040 is recognized. If not, or if we should force MEM into a register,
2041 make a pseudo for this SUBREG. */
2042 replacement = find_fixup_replacement (replacements, x);
2043 if (replacement->new)
2044 {
2045 *loc = replacement->new;
2046 return;
2047 }
2048
2049 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2050
2051 INSN_CODE (insn) = -1;
2052 if (! flag_force_mem && recog_memoized (insn) >= 0)
2053 return;
2054
2055 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2056 return;
2057 }
2058 break;
2059
2060 case SET:
2061 /* First do special simplification of bit-field references. */
2062 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2063 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2064 optimize_bit_field (x, insn, 0);
2065 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2066 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2067 optimize_bit_field (x, insn, NULL_PTR);
2068
2069 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2070 into a register and then store it back out. */
2071 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2072 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2073 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2074 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2075 > GET_MODE_SIZE (GET_MODE (var))))
2076 {
2077 replacement = find_fixup_replacement (replacements, var);
2078 if (replacement->new == 0)
2079 replacement->new = gen_reg_rtx (GET_MODE (var));
2080
2081 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2082 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2083 }
2084
2085 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2086 insn into a pseudo and store the low part of the pseudo into VAR. */
2087 if (GET_CODE (SET_DEST (x)) == SUBREG
2088 && SUBREG_REG (SET_DEST (x)) == var
2089 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2090 > GET_MODE_SIZE (GET_MODE (var))))
2091 {
2092 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2093 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2094 tem)),
2095 insn);
2096 break;
2097 }
2098
2099 {
2100 rtx dest = SET_DEST (x);
2101 rtx src = SET_SRC (x);
2102 #ifdef HAVE_insv
2103 rtx outerdest = dest;
2104 #endif
2105
2106 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2107 || GET_CODE (dest) == SIGN_EXTRACT
2108 || GET_CODE (dest) == ZERO_EXTRACT)
2109 dest = XEXP (dest, 0);
2110
2111 if (GET_CODE (src) == SUBREG)
2112 src = XEXP (src, 0);
2113
2114 /* If VAR does not appear at the top level of the SET
2115 just scan the lower levels of the tree. */
2116
2117 if (src != var && dest != var)
2118 break;
2119
2120 /* We will need to rerecognize this insn. */
2121 INSN_CODE (insn) = -1;
2122
2123 #ifdef HAVE_insv
2124 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2125 {
2126 /* Since this case will return, ensure we fixup all the
2127 operands here. */
2128 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2129 insn, replacements);
2130 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2131 insn, replacements);
2132 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2133 insn, replacements);
2134
2135 tem = XEXP (outerdest, 0);
2136
2137 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2138 that may appear inside a ZERO_EXTRACT.
2139 This was legitimate when the MEM was a REG. */
2140 if (GET_CODE (tem) == SUBREG
2141 && SUBREG_REG (tem) == var)
2142 tem = fixup_memory_subreg (tem, insn, 0);
2143 else
2144 tem = fixup_stack_1 (tem, insn);
2145
2146 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2147 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2148 && ! mode_dependent_address_p (XEXP (tem, 0))
2149 && ! MEM_VOLATILE_P (tem))
2150 {
2151 enum machine_mode wanted_mode;
2152 enum machine_mode is_mode = GET_MODE (tem);
2153 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2154
2155 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2156 if (wanted_mode == VOIDmode)
2157 wanted_mode = word_mode;
2158
2159 /* If we have a narrower mode, we can do something. */
2160 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2161 {
2162 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2163 rtx old_pos = XEXP (outerdest, 2);
2164 rtx newmem;
2165
2166 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2167 offset = (GET_MODE_SIZE (is_mode)
2168 - GET_MODE_SIZE (wanted_mode) - offset);
2169
2170 pos %= GET_MODE_BITSIZE (wanted_mode);
2171
2172 newmem = gen_rtx_MEM (wanted_mode,
2173 plus_constant (XEXP (tem, 0),
2174 offset));
2175 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2176 MEM_COPY_ATTRIBUTES (newmem, tem);
2177
2178 /* Make the change and see if the insn remains valid. */
2179 INSN_CODE (insn) = -1;
2180 XEXP (outerdest, 0) = newmem;
2181 XEXP (outerdest, 2) = GEN_INT (pos);
2182
2183 if (recog_memoized (insn) >= 0)
2184 return;
2185
2186 /* Otherwise, restore old position. XEXP (x, 0) will be
2187 restored later. */
2188 XEXP (outerdest, 2) = old_pos;
2189 }
2190 }
2191
2192 /* If we get here, the bit-field store doesn't allow memory
2193 or isn't located at a constant position. Load the value into
2194 a register, do the store, and put it back into memory. */
2195
2196 tem1 = gen_reg_rtx (GET_MODE (tem));
2197 emit_insn_before (gen_move_insn (tem1, tem), insn);
2198 emit_insn_after (gen_move_insn (tem, tem1), insn);
2199 XEXP (outerdest, 0) = tem1;
2200 return;
2201 }
2202 #endif
2203
2204 /* STRICT_LOW_PART is a no-op on memory references
2205 and it can cause combinations to be unrecognizable,
2206 so eliminate it. */
2207
2208 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2209 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2210
2211 /* A valid insn to copy VAR into or out of a register
2212 must be left alone, to avoid an infinite loop here.
2213 If the reference to VAR is by a subreg, fix that up,
2214 since SUBREG is not valid for a memref.
2215 Also fix up the address of the stack slot.
2216
2217 Note that we must not try to recognize the insn until
2218 after we know that we have valid addresses and no
2219 (subreg (mem ...) ...) constructs, since these interfere
2220 with determining the validity of the insn. */
2221
2222 if ((SET_SRC (x) == var
2223 || (GET_CODE (SET_SRC (x)) == SUBREG
2224 && SUBREG_REG (SET_SRC (x)) == var))
2225 && (GET_CODE (SET_DEST (x)) == REG
2226 || (GET_CODE (SET_DEST (x)) == SUBREG
2227 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2228 && GET_MODE (var) == promoted_mode
2229 && x == single_set (insn))
2230 {
2231 rtx pat;
2232
2233 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2234 if (replacement->new)
2235 SET_SRC (x) = replacement->new;
2236 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2237 SET_SRC (x) = replacement->new
2238 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2239 else
2240 SET_SRC (x) = replacement->new
2241 = fixup_stack_1 (SET_SRC (x), insn);
2242
2243 if (recog_memoized (insn) >= 0)
2244 return;
2245
2246 /* INSN is not valid, but we know that we want to
2247 copy SET_SRC (x) to SET_DEST (x) in some way. So
2248 we generate the move and see whether it requires more
2249 than one insn. If it does, we emit those insns and
2250 delete INSN. Otherwise, we can just replace the pattern
2251 of INSN; we have already verified above that INSN has
2252 no other function than to do X. */
2253
2254 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2255 if (GET_CODE (pat) == SEQUENCE)
2256 {
2257 emit_insn_after (pat, insn);
2258 PUT_CODE (insn, NOTE);
2259 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2260 NOTE_SOURCE_FILE (insn) = 0;
2261 }
2262 else
2263 PATTERN (insn) = pat;
2264
2265 return;
2266 }
2267
2268 if ((SET_DEST (x) == var
2269 || (GET_CODE (SET_DEST (x)) == SUBREG
2270 && SUBREG_REG (SET_DEST (x)) == var))
2271 && (GET_CODE (SET_SRC (x)) == REG
2272 || (GET_CODE (SET_SRC (x)) == SUBREG
2273 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2274 && GET_MODE (var) == promoted_mode
2275 && x == single_set (insn))
2276 {
2277 rtx pat;
2278
2279 if (GET_CODE (SET_DEST (x)) == SUBREG)
2280 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2281 else
2282 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2283
2284 if (recog_memoized (insn) >= 0)
2285 return;
2286
2287 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2288 if (GET_CODE (pat) == SEQUENCE)
2289 {
2290 emit_insn_after (pat, insn);
2291 PUT_CODE (insn, NOTE);
2292 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2293 NOTE_SOURCE_FILE (insn) = 0;
2294 }
2295 else
2296 PATTERN (insn) = pat;
2297
2298 return;
2299 }
2300
2301 /* Otherwise, storing into VAR must be handled specially
2302 by storing into a temporary and copying that into VAR
2303 with a new insn after this one. Note that this case
2304 will be used when storing into a promoted scalar since
2305 the insn will now have different modes on the input
2306 and output and hence will be invalid (except for the case
2307 of setting it to a constant, which does not need any
2308 change if it is valid). We generate extra code in that case,
2309 but combine.c will eliminate it. */
2310
2311 if (dest == var)
2312 {
2313 rtx temp;
2314 rtx fixeddest = SET_DEST (x);
2315
2316 /* A STRICT_LOW_PART around a MEM can be discarded. */
2317 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2318 fixeddest = XEXP (fixeddest, 0);
2319 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2320 if (GET_CODE (fixeddest) == SUBREG)
2321 {
2322 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2323 promoted_mode = GET_MODE (fixeddest);
2324 }
2325 else
2326 fixeddest = fixup_stack_1 (fixeddest, insn);
2327
2328 temp = gen_reg_rtx (promoted_mode);
2329
2330 emit_insn_after (gen_move_insn (fixeddest,
2331 gen_lowpart (GET_MODE (fixeddest),
2332 temp)),
2333 insn);
2334
2335 SET_DEST (x) = temp;
2336 }
2337 }
2338
2339 default:
2340 break;
2341 }
2342
2343 /* Nothing special about this RTX; fix its operands. */
2344
2345 fmt = GET_RTX_FORMAT (code);
2346 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2347 {
2348 if (fmt[i] == 'e')
2349 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2350 else if (fmt[i] == 'E')
2351 {
2352 register int j;
2353 for (j = 0; j < XVECLEN (x, i); j++)
2354 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2355 insn, replacements);
2356 }
2357 }
2358 }
2359 \f
2360 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2361 return an rtx (MEM:m1 newaddr) which is equivalent.
2362 If any insns must be emitted to compute NEWADDR, put them before INSN.
2363
2364 UNCRITICAL nonzero means accept paradoxical subregs.
2365 This is used for subregs found inside REG_NOTES. */
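/* Illustrative example, not part of the original sources (the register
   number is hypothetical; assumes a little-endian target with 4-byte
   words): here

       (subreg:SI (mem:DI (reg:SI 100)) 1)

   becomes

       (mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   with any insns needed to form the new address emitted before INSN.  */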
2366
2367 static rtx
2368 fixup_memory_subreg (x, insn, uncritical)
2369 rtx x;
2370 rtx insn;
2371 int uncritical;
2372 {
2373 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2374 rtx addr = XEXP (SUBREG_REG (x), 0);
2375 enum machine_mode mode = GET_MODE (x);
2376 rtx result;
2377
2378 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2379 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2380 && ! uncritical)
2381 abort ();
2382
2383 if (BYTES_BIG_ENDIAN)
2384 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2385 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2386 addr = plus_constant (addr, offset);
2387 if (!flag_force_addr && memory_address_p (mode, addr))
2388 /* Shortcut if no insns need be emitted. */
2389 return change_address (SUBREG_REG (x), mode, addr);
2390 start_sequence ();
2391 result = change_address (SUBREG_REG (x), mode, addr);
2392 emit_insn_before (gen_sequence (), insn);
2393 end_sequence ();
2394 return result;
2395 }
2396
2397 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2398 Replace subexpressions of X in place.
2399 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2400 Otherwise return X, with its contents possibly altered.
2401
2402 If any insns must be emitted to compute NEWADDR, put them before INSN.
2403
2404 UNCRITICAL is as in fixup_memory_subreg. */
2405
2406 static rtx
2407 walk_fixup_memory_subreg (x, insn, uncritical)
2408 register rtx x;
2409 rtx insn;
2410 int uncritical;
2411 {
2412 register enum rtx_code code;
2413 register const char *fmt;
2414 register int i;
2415
2416 if (x == 0)
2417 return 0;
2418
2419 code = GET_CODE (x);
2420
2421 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2422 return fixup_memory_subreg (x, insn, uncritical);
2423
2424 /* Nothing special about this RTX; fix its operands. */
2425
2426 fmt = GET_RTX_FORMAT (code);
2427 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2428 {
2429 if (fmt[i] == 'e')
2430 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2431 else if (fmt[i] == 'E')
2432 {
2433 register int j;
2434 for (j = 0; j < XVECLEN (x, i); j++)
2435 XVECEXP (x, i, j)
2436 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2437 }
2438 }
2439 return x;
2440 }
2441 \f
2442 /* For each memory ref within X, if it refers to a stack slot
2443 with an out of range displacement, put the address in a temp register
2444 (emitting new insns before INSN to load these registers)
2445 and alter the memory ref to use that register.
2446 Replace each such MEM rtx with a copy, to avoid clobberage. */
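/* Illustrative example, not part of the original sources (the displacement
   is hypothetical): if

       (mem:SI (plus:SI (reg frame_pointer) (const_int 40000)))

   is not a valid address on the target, the sum is loaded into a fresh
   pseudo before INSN and the reference becomes (mem:SI (reg:SI 250)),
   using a copy of the MEM so that shared rtl is not clobbered.  */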
2447
2448 static rtx
2449 fixup_stack_1 (x, insn)
2450 rtx x;
2451 rtx insn;
2452 {
2453 register int i;
2454 register RTX_CODE code = GET_CODE (x);
2455 register const char *fmt;
2456
2457 if (code == MEM)
2458 {
2459 register rtx ad = XEXP (x, 0);
2460 /* If we have the address of a stack slot but it's not valid
2461 (displacement is too large), compute the sum in a register. */
2462 if (GET_CODE (ad) == PLUS
2463 && GET_CODE (XEXP (ad, 0)) == REG
2464 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2465 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2466 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2467 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2468 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2469 #endif
2470 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2471 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2472 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2473 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2474 {
2475 rtx temp, seq;
2476 if (memory_address_p (GET_MODE (x), ad))
2477 return x;
2478
2479 start_sequence ();
2480 temp = copy_to_reg (ad);
2481 seq = gen_sequence ();
2482 end_sequence ();
2483 emit_insn_before (seq, insn);
2484 return change_address (x, VOIDmode, temp);
2485 }
2486 return x;
2487 }
2488
2489 fmt = GET_RTX_FORMAT (code);
2490 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2491 {
2492 if (fmt[i] == 'e')
2493 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2494 else if (fmt[i] == 'E')
2495 {
2496 register int j;
2497 for (j = 0; j < XVECLEN (x, i); j++)
2498 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2499 }
2500 }
2501 return x;
2502 }
2503 \f
2504 /* Optimization: a bit-field instruction whose field
2505 happens to be a byte or halfword in memory
2506 can be changed to a move instruction.
2507
2508 We call here when INSN is an insn to examine or store into a bit-field.
2509 BODY is the SET-rtx to be altered.
2510
2511 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2512 (Currently this is called only from function.c, and EQUIV_MEM
2513 is always 0.) */
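/* Illustrative example, not part of the original sources (assumes bits
   are numbered from the low-address end and the register number is
   hypothetical): a byte-sized, byte-aligned store such as

       (set (zero_extract:SI (mem:SI <addr>) (const_int 8) (const_int 8))
            (reg:SI 103))

   can be rewritten as the plain move

       (set (mem:QI (plus <addr> (const_int 1))) (subreg:QI (reg:SI 103) 0))

   which is what the code below arranges, subject to the checks that the
   containing word is really memory and its address is mode-independent.  */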
2514
2515 static void
2516 optimize_bit_field (body, insn, equiv_mem)
2517 rtx body;
2518 rtx insn;
2519 rtx *equiv_mem;
2520 {
2521 register rtx bitfield;
2522 int destflag;
2523 rtx seq = 0;
2524 enum machine_mode mode;
2525
2526 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2527 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2528 bitfield = SET_DEST (body), destflag = 1;
2529 else
2530 bitfield = SET_SRC (body), destflag = 0;
2531
2532 /* First check that the field being stored has constant size and position
2533 and is in fact a byte or halfword suitably aligned. */
2534
2535 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2536 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2537 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2538 != BLKmode)
2539 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2540 {
2541 register rtx memref = 0;
2542
2543 /* Now check that the containing word is memory, not a register,
2544 and that it is safe to change the machine mode. */
2545
2546 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2547 memref = XEXP (bitfield, 0);
2548 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2549 && equiv_mem != 0)
2550 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2551 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2552 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2553 memref = SUBREG_REG (XEXP (bitfield, 0));
2554 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2555 && equiv_mem != 0
2556 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2557 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2558
2559 if (memref
2560 && ! mode_dependent_address_p (XEXP (memref, 0))
2561 && ! MEM_VOLATILE_P (memref))
2562 {
2563 /* Now adjust the address, first for any subreg'ing
2564 that we are now getting rid of,
2565 and then for which byte of the word is wanted. */
2566
2567 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2568 rtx insns;
2569
2570 /* Adjust OFFSET to count bits from low-address byte. */
2571 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2572 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2573 - offset - INTVAL (XEXP (bitfield, 1)));
2574
2575 /* Adjust OFFSET to count bytes from low-address byte. */
2576 offset /= BITS_PER_UNIT;
2577 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2578 {
2579 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2580 if (BYTES_BIG_ENDIAN)
2581 offset -= (MIN (UNITS_PER_WORD,
2582 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2583 - MIN (UNITS_PER_WORD,
2584 GET_MODE_SIZE (GET_MODE (memref))));
2585 }
2586
2587 start_sequence ();
2588 memref = change_address (memref, mode,
2589 plus_constant (XEXP (memref, 0), offset));
2590 insns = get_insns ();
2591 end_sequence ();
2592 emit_insns_before (insns, insn);
2593
2594 /* Store this memory reference where
2595 we found the bit field reference. */
2596
2597 if (destflag)
2598 {
2599 validate_change (insn, &SET_DEST (body), memref, 1);
2600 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2601 {
2602 rtx src = SET_SRC (body);
2603 while (GET_CODE (src) == SUBREG
2604 && SUBREG_WORD (src) == 0)
2605 src = SUBREG_REG (src);
2606 if (GET_MODE (src) != GET_MODE (memref))
2607 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2608 validate_change (insn, &SET_SRC (body), src, 1);
2609 }
2610 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2611 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2612 /* This shouldn't happen because anything that didn't have
2613 one of these modes should have got converted explicitly
2614 and then referenced through a subreg.
2615 This is so because the original bit-field was
2616 handled by agg_mode and so its tree structure had
2617 the same mode that memref now has. */
2618 abort ();
2619 }
2620 else
2621 {
2622 rtx dest = SET_DEST (body);
2623
2624 while (GET_CODE (dest) == SUBREG
2625 && SUBREG_WORD (dest) == 0
2626 && (GET_MODE_CLASS (GET_MODE (dest))
2627 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2628 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2629 <= UNITS_PER_WORD))
2630 dest = SUBREG_REG (dest);
2631
2632 validate_change (insn, &SET_DEST (body), dest, 1);
2633
2634 if (GET_MODE (dest) == GET_MODE (memref))
2635 validate_change (insn, &SET_SRC (body), memref, 1);
2636 else
2637 {
2638 /* Convert the mem ref to the destination mode. */
2639 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2640
2641 start_sequence ();
2642 convert_move (newreg, memref,
2643 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2644 seq = get_insns ();
2645 end_sequence ();
2646
2647 validate_change (insn, &SET_SRC (body), newreg, 1);
2648 }
2649 }
2650
2651 /* See if we can convert this extraction or insertion into
2652 a simple move insn. We might not be able to do so if this
2653 was, for example, part of a PARALLEL.
2654
2655 If we succeed, write out any needed conversions. If we fail,
2656 it is hard to guess why we failed, so don't do anything
2657 special; just let the optimization be suppressed. */
2658
2659 if (apply_change_group () && seq)
2660 emit_insns_before (seq, insn);
2661 }
2662 }
2663 }
2664 \f
2665 /* These routines are responsible for converting virtual register references
2666 to the actual hard register references once RTL generation is complete.
2667
2668 The following five variables are used for communication between the
2669 routines. They contain the offsets of the virtual registers from their
2670 respective hard registers. */
2671
2672 static int in_arg_offset;
2673 static int var_offset;
2674 static int dynamic_offset;
2675 static int out_arg_offset;
2676 static int cfa_offset;
2677
2678 /* On most machines, the stack pointer register is equivalent to the bottom
2679 of the stack. */
2680
2681 #ifndef STACK_POINTER_OFFSET
2682 #define STACK_POINTER_OFFSET 0
2683 #endif
2684
2685 /* If not defined, pick an appropriate default for the offset of dynamically
2686 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2687 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2688
2689 #ifndef STACK_DYNAMIC_OFFSET
2690
2691 #ifdef ACCUMULATE_OUTGOING_ARGS
2692 /* The bottom of the stack points to the actual arguments. If
2693 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2694 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2695 stack space for register parameters is not pushed by the caller, but
2696 rather part of the fixed stack areas and hence not included in
2697 `current_function_outgoing_args_size'. Nevertheless, we must allow
2698 for it when allocating stack dynamic objects. */
2699
2700 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2701 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2702 (current_function_outgoing_args_size \
2703 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2704
2705 #else
2706 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2707 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2708 #endif
2709
2710 #else
2711 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2712 #endif
2713 #endif
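/* Worked example with hypothetical values (not from any particular
   target): under ACCUMULATE_OUTGOING_ARGS with
   current_function_outgoing_args_size == 32, REG_PARM_STACK_SPACE == 16
   and STACK_POINTER_OFFSET == 0, STACK_DYNAMIC_OFFSET evaluates to 48,
   so virtual_stack_dynamic_rtx is later replaced by the stack pointer
   plus 48, leaving room for the outgoing-argument area.  */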
2714
2715 /* On a few machines, the CFA coincides with the arg pointer. */
2716
2717 #ifndef ARG_POINTER_CFA_OFFSET
2718 #define ARG_POINTER_CFA_OFFSET 0
2719 #endif
2720
2721
2722 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2723 its address taken. DECL is the decl for the object stored in the
2724 register, for later use if we do need to force REG into the stack.
2725 REG is overwritten by the MEM like in put_reg_into_stack. */
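/* Illustrative result, not part of the original sources (register numbers
   are hypothetical): for an SImode variable living in (reg:SI 120) whose
   address is taken, REG is rewritten in place into

       (mem:SI (addressof:Pmode (reg:SI 200) 120 <decl>))

   where (reg:SI 200) is a fresh pseudo and 120 records the original
   register number for later use by put_addressof_into_stack.  */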
2726
2727 rtx
2728 gen_mem_addressof (reg, decl)
2729 rtx reg;
2730 tree decl;
2731 {
2732 tree type = TREE_TYPE (decl);
2733 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2734 REGNO (reg), decl);
2735
2736 /* If the original REG was a user-variable, then so is the REG whose
2737 address is being taken. Likewise for unchanging. */
2738 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2739 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2740
2741 PUT_CODE (reg, MEM);
2742 PUT_MODE (reg, DECL_MODE (decl));
2743 XEXP (reg, 0) = r;
2744 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2745 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2746 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2747
2748 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2749 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2750
2751 return reg;
2752 }
2753
2754 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2755
2756 #if 0
2757 void
2758 flush_addressof (decl)
2759 tree decl;
2760 {
2761 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2762 && DECL_RTL (decl) != 0
2763 && GET_CODE (DECL_RTL (decl)) == MEM
2764 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2765 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2766 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2767 }
2768 #endif
2769
2770 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2771
2772 static void
2773 put_addressof_into_stack (r, ht)
2774 rtx r;
2775 struct hash_table *ht;
2776 {
2777 tree decl = ADDRESSOF_DECL (r);
2778 rtx reg = XEXP (r, 0);
2779
2780 if (GET_CODE (reg) != REG)
2781 abort ();
2782
2783 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2784 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2785 ADDRESSOF_REGNO (r),
2786 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2787 }
2788
2789 /* List of replacements made below in purge_addressof_1 when creating
2790 bitfield insertions. */
2791 static rtx purge_bitfield_addressof_replacements;
2792
2793 /* List of replacements made below in purge_addressof_1 for patterns
2794 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2795 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2796 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2797 enough in complex cases, e.g. when some field values can be
2798 extracted by using a MEM in a narrower mode. */
2799 static rtx purge_addressof_replacements;
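/* For example (illustrative, not from the original sources): once the MEM
   in (mem:SI (addressof (reg 105) 100 <decl>)) has been replaced by
   (reg 105), the list holds

       (expr_list (addressof (reg 105) 100 <decl>)
                  (expr_list (reg 105) <rest-of-list>))

   so the same substitution can later be applied inside REG_NOTES.  */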
2800
2801 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2802 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2803 the stack. If the function returns FALSE then the replacement could not
2804 be made. */
2805
2806 static boolean
2807 purge_addressof_1 (loc, insn, force, store, ht)
2808 rtx *loc;
2809 rtx insn;
2810 int force, store;
2811 struct hash_table *ht;
2812 {
2813 rtx x;
2814 RTX_CODE code;
2815 int i, j;
2816 const char *fmt;
2817 boolean result = true;
2818
2819 /* Re-start here to avoid recursion in common cases. */
2820 restart:
2821
2822 x = *loc;
2823 if (x == 0)
2824 return true;
2825
2826 code = GET_CODE (x);
2827
2828 /* If we don't return in any of the cases below, we will recurse inside
2829 the RTX, which will normally result in any ADDRESSOF being forced into
2830 memory. */
2831 if (code == SET)
2832 {
2833 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2834 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2835 return result;
2836 }
2837
2838 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2839 {
2840 /* We must create a copy of the rtx because it was created by
2841 overwriting a REG rtx which is always shared. */
2842 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2843 rtx insns;
2844
2845 if (validate_change (insn, loc, sub, 0)
2846 || validate_replace_rtx (x, sub, insn))
2847 return true;
2848
2849 start_sequence ();
2850 sub = force_operand (sub, NULL_RTX);
2851 if (! validate_change (insn, loc, sub, 0)
2852 && ! validate_replace_rtx (x, sub, insn))
2853 abort ();
2854
2855 insns = gen_sequence ();
2856 end_sequence ();
2857 emit_insn_before (insns, insn);
2858 return true;
2859 }
2860
2861 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2862 {
2863 rtx sub = XEXP (XEXP (x, 0), 0);
2864 rtx sub2;
2865
2866 if (GET_CODE (sub) == MEM)
2867 {
2868 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2869 MEM_COPY_ATTRIBUTES (sub2, sub);
2870 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2871 sub = sub2;
2872 }
2873 else if (GET_CODE (sub) == REG
2874 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2875 ;
2876 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2877 {
2878 int size_x, size_sub;
2879
2880 if (!insn)
2881 {
2882 /* When processing REG_NOTES look at the list of
2883 replacements done on the insn to find the register that X
2884 was replaced by. */
2885 rtx tem;
2886
2887 for (tem = purge_bitfield_addressof_replacements;
2888 tem != NULL_RTX;
2889 tem = XEXP (XEXP (tem, 1), 1))
2890 if (rtx_equal_p (x, XEXP (tem, 0)))
2891 {
2892 *loc = XEXP (XEXP (tem, 1), 0);
2893 return true;
2894 }
2895
2896 /* See comment for purge_addressof_replacements. */
2897 for (tem = purge_addressof_replacements;
2898 tem != NULL_RTX;
2899 tem = XEXP (XEXP (tem, 1), 1))
2900 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2901 {
2902 rtx z = XEXP (XEXP (tem, 1), 0);
2903
2904 if (GET_MODE (x) == GET_MODE (z)
2905 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2906 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2907 abort ();
2908
2909 /* It can happen that the note may speak of things
2910 in a wider (or just different) mode than the
2911 code did. This is especially true of
2912 REG_RETVAL. */
2913
2914 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2915 z = SUBREG_REG (z);
2916
2917 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2918 && (GET_MODE_SIZE (GET_MODE (x))
2919 > GET_MODE_SIZE (GET_MODE (z))))
2920 {
2921 /* This can occur as a result of invalid
2922 pointer casts, e.g. float f; ...
2923 *(long long int *)&f.
2924 ??? We could emit a warning here, but
2925 without a line number that wouldn't be
2926 very helpful. */
2927 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2928 }
2929 else
2930 z = gen_lowpart (GET_MODE (x), z);
2931
2932 *loc = z;
2933 return true;
2934 }
2935
2936 /* Sometimes we may not be able to find the replacement. For
2937 example when the original insn was a MEM in a wider mode,
2938 and the note is part of a sign extension of a narrowed
2939 version of that MEM. Gcc testcase compile/990829-1.c can
2940 generate an example of this situation. Rather than complain,
2941 we return false, which will prompt our caller to remove the
2942 offending note. */
2943 return false;
2944 }
2945
2946 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2947 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2948
2949 /* Don't even consider working with paradoxical subregs,
2950 or the moral equivalent seen here. */
2951 if (size_x <= size_sub
2952 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2953 {
2954 /* Do a bitfield insertion to mirror what would happen
2955 in memory. */
2956
2957 rtx val, seq;
2958
2959 if (store)
2960 {
2961 rtx p = PREV_INSN (insn);
2962
2963 start_sequence ();
2964 val = gen_reg_rtx (GET_MODE (x));
2965 if (! validate_change (insn, loc, val, 0))
2966 {
2967 /* Discard the current sequence and put the
2968 ADDRESSOF on stack. */
2969 end_sequence ();
2970 goto give_up;
2971 }
2972 seq = gen_sequence ();
2973 end_sequence ();
2974 emit_insn_before (seq, insn);
2975 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2976 insn, ht);
2977
2978 start_sequence ();
2979 store_bit_field (sub, size_x, 0, GET_MODE (x),
2980 val, GET_MODE_SIZE (GET_MODE (sub)),
2981 GET_MODE_SIZE (GET_MODE (sub)));
2982
2983 /* Make sure to unshare any shared rtl that store_bit_field
2984 might have created. */
2985 unshare_all_rtl_again (get_insns ());
2986
2987 seq = gen_sequence ();
2988 end_sequence ();
2989 p = emit_insn_after (seq, insn);
2990 if (NEXT_INSN (insn))
2991 compute_insns_for_mem (NEXT_INSN (insn),
2992 p ? NEXT_INSN (p) : NULL_RTX,
2993 ht);
2994 }
2995 else
2996 {
2997 rtx p = PREV_INSN (insn);
2998
2999 start_sequence ();
3000 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3001 GET_MODE (x), GET_MODE (x),
3002 GET_MODE_SIZE (GET_MODE (sub)),
3003 GET_MODE_SIZE (GET_MODE (sub)));
3004
3005 if (! validate_change (insn, loc, val, 0))
3006 {
3007 /* Discard the current sequence and put the
3008 ADDRESSOF on stack. */
3009 end_sequence ();
3010 goto give_up;
3011 }
3012
3013 seq = gen_sequence ();
3014 end_sequence ();
3015 emit_insn_before (seq, insn);
3016 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3017 insn, ht);
3018 }
3019
3020 /* Remember the replacement so that the same one can be done
3021 on the REG_NOTES. */
3022 purge_bitfield_addressof_replacements
3023 = gen_rtx_EXPR_LIST (VOIDmode, x,
3024 gen_rtx_EXPR_LIST
3025 (VOIDmode, val,
3026 purge_bitfield_addressof_replacements));
3027
3028 /* We replaced with a reg -- all done. */
3029 return true;
3030 }
3031 }
3032
3033 else if (validate_change (insn, loc, sub, 0))
3034 {
3035 /* Remember the replacement so that the same one can be done
3036 on the REG_NOTES. */
3037 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3038 {
3039 rtx tem;
3040
3041 for (tem = purge_addressof_replacements;
3042 tem != NULL_RTX;
3043 tem = XEXP (XEXP (tem, 1), 1))
3044 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3045 {
3046 XEXP (XEXP (tem, 1), 0) = sub;
3047 return true;
3048 }
3049 purge_addressof_replacements
3050 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3051 gen_rtx_EXPR_LIST (VOIDmode, sub,
3052 purge_addressof_replacements));
3053 return true;
3054 }
3055 goto restart;
3056 }
3057 give_up:;
3058 /* else give up and put it into the stack */
3059 }
3060
3061 else if (code == ADDRESSOF)
3062 {
3063 put_addressof_into_stack (x, ht);
3064 return true;
3065 }
3066 else if (code == SET)
3067 {
3068 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3069 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3070 return result;
3071 }
3072
3073 /* Scan all subexpressions. */
3074 fmt = GET_RTX_FORMAT (code);
3075 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3076 {
3077 if (*fmt == 'e')
3078 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3079 else if (*fmt == 'E')
3080 for (j = 0; j < XVECLEN (x, i); j++)
3081 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3082 }
3083
3084 return result;
3085 }
3086
3087 /* Return a new hash table entry in HT. */
3088
3089 static struct hash_entry *
3090 insns_for_mem_newfunc (he, ht, k)
3091 struct hash_entry *he;
3092 struct hash_table *ht;
3093 hash_table_key k ATTRIBUTE_UNUSED;
3094 {
3095 struct insns_for_mem_entry *ifmhe;
3096 if (he)
3097 return he;
3098
3099 ifmhe = ((struct insns_for_mem_entry *)
3100 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3101 ifmhe->insns = NULL_RTX;
3102
3103 return &ifmhe->he;
3104 }
3105
3106 /* Return a hash value for K, a REG. */
3107
3108 static unsigned long
3109 insns_for_mem_hash (k)
3110 hash_table_key k;
3111 {
3112 /* K is really an RTX. Just use its address as the hash value. */
3113 return (unsigned long) k;
3114 }
3115
3116 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3117
3118 static boolean
3119 insns_for_mem_comp (k1, k2)
3120 hash_table_key k1;
3121 hash_table_key k2;
3122 {
3123 return k1 == k2;
3124 }
3125
3126 struct insns_for_mem_walk_info {
3127 /* The hash table that we are using to record which INSNs use which
3128 MEMs. */
3129 struct hash_table *ht;
3130
3131 /* The INSN we are currently processing. */
3132 rtx insn;
3133
3134 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3135 to find the insns that use the REGs in the ADDRESSOFs. */
3136 int pass;
3137 };
3138
3139 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3140 that might be used in an ADDRESSOF expression, record this INSN in
3141 the hash table given by DATA (which is really a pointer to an
3142 insns_for_mem_walk_info structure). */
3143
3144 static int
3145 insns_for_mem_walk (r, data)
3146 rtx *r;
3147 void *data;
3148 {
3149 struct insns_for_mem_walk_info *ifmwi
3150 = (struct insns_for_mem_walk_info *) data;
3151
3152 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3153 && GET_CODE (XEXP (*r, 0)) == REG)
3154 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3155 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3156 {
3157 /* Look up this REG in the hash table; we do not create an entry here. */
3158 struct insns_for_mem_entry *ifme
3159 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3160 *r,
3161 /*create=*/0,
3162 /*copy=*/0);
3163
3164 /* If we have not already recorded this INSN, do so now. Since
3165 we process the INSNs in order, we know that if we have
3166 recorded it, it must be at the front of the list. */
3167 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3168 {
3169 /* We do the allocation on the same obstack as is used for
3170 the hash table since this memory will not be used once
3171 the hash table is deallocated. */
3172 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3173 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3174 ifme->insns);
3175 pop_obstacks ();
3176 }
3177 }
3178
3179 return 0;
3180 }
3181
3182 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3183 which REGs in HT. */
3184
3185 static void
3186 compute_insns_for_mem (insns, last_insn, ht)
3187 rtx insns;
3188 rtx last_insn;
3189 struct hash_table *ht;
3190 {
3191 rtx insn;
3192 struct insns_for_mem_walk_info ifmwi;
3193 ifmwi.ht = ht;
3194
3195 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3196 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3197 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3198 {
3199 ifmwi.insn = insn;
3200 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3201 }
3202 }
3203
3204 /* Helper function for purge_addressof called through for_each_rtx.
3205 Returns true iff the rtl is an ADDRESSOF. */
3206 static int
3207 is_addressof (rtl, data)
3208 rtx * rtl;
3209 void * data ATTRIBUTE_UNUSED;
3210 {
3211 return GET_CODE (* rtl) == ADDRESSOF;
3212 }
3213
3214 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3215 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3216 stack. */
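/* Illustrative overview, not part of the original sources: a reference
   such as (mem:SI (addressof:Pmode (reg:SI 105) 100 <decl>)) is normally
   collapsed back to (reg:SI 105) when the containing insn still accepts a
   register operand; only when that fails, or when a bare ADDRESSOF
   survives because the address really escapes, is the register forced
   into a stack slot by put_addressof_into_stack.  */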
3217
3218 void
3219 purge_addressof (insns)
3220 rtx insns;
3221 {
3222 rtx insn;
3223 struct hash_table ht;
3224
3225 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3226 requires a fixup pass over the instruction stream to correct
3227 INSNs that depended on the REG being a REG, and not a MEM. But,
3228 these fixup passes are slow. Furthermore, most MEMs are not
3229 mentioned in very many instructions. So, we speed up the process
3230 by pre-calculating which REGs occur in which INSNs; that allows
3231 us to perform the fixup passes much more quickly. */
3232 hash_table_init (&ht,
3233 insns_for_mem_newfunc,
3234 insns_for_mem_hash,
3235 insns_for_mem_comp);
3236 compute_insns_for_mem (insns, NULL_RTX, &ht);
3237
3238 for (insn = insns; insn; insn = NEXT_INSN (insn))
3239 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3240 || GET_CODE (insn) == CALL_INSN)
3241 {
3242 if (! purge_addressof_1 (&PATTERN (insn), insn,
3243 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3244 /* If we could not replace the ADDRESSOFs in the insn,
3245 something is wrong. */
3246 abort ();
3247
3248 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3249 {
3250 /* If we could not replace the ADDRESSOFs in the insn's notes,
3251 we can just remove the offending notes instead. */
3252 rtx note;
3253
3254 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3255 {
3256 /* If we find a REG_RETVAL note then the insn is a libcall.
3257 Such insns must have REG_EQUAL notes as well, in order
3258 for later passes of the compiler to work. So it is not
3259 safe to delete the notes here, and instead we abort. */
3260 if (REG_NOTE_KIND (note) == REG_RETVAL)
3261 abort ();
3262 if (for_each_rtx (& note, is_addressof, NULL))
3263 remove_note (insn, note);
3264 }
3265 }
3266 }
3267
3268 /* Clean up. */
3269 hash_table_free (&ht);
3270 purge_bitfield_addressof_replacements = 0;
3271 purge_addressof_replacements = 0;
3272 }
3273 \f
3274 /* Pass through the INSNS of function FNDECL and convert virtual register
3275 references to hard register references. */
3276
3277 void
3278 instantiate_virtual_regs (fndecl, insns)
3279 tree fndecl;
3280 rtx insns;
3281 {
3282 rtx insn;
3283 int i;
3284
3285 /* Compute the offsets to use for this function. */
3286 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3287 var_offset = STARTING_FRAME_OFFSET;
3288 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3289 out_arg_offset = STACK_POINTER_OFFSET;
3290 cfa_offset = ARG_POINTER_CFA_OFFSET;
3291
3292 /* Scan all variables and parameters of this function. For each that is
3293 in memory, instantiate all virtual registers if the result is a valid
3294 address. If not, we do it later. That will handle most uses of virtual
3295 regs on many machines. */
3296 instantiate_decls (fndecl, 1);
3297
3298 /* Initialize recognition, indicating that volatile is OK. */
3299 init_recog ();
3300
3301 /* Scan through all the insns, instantiating every virtual register still
3302 present. */
3303 for (insn = insns; insn; insn = NEXT_INSN (insn))
3304 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3305 || GET_CODE (insn) == CALL_INSN)
3306 {
3307 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3308 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3309 }
3310
3311 /* Instantiate the stack slots for the parm registers, for later use in
3312 addressof elimination. */
3313 for (i = 0; i < max_parm_reg; ++i)
3314 if (parm_reg_stack_loc[i])
3315 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3316
3317 /* Now instantiate the remaining register equivalences for debugging info.
3318 These will not be valid addresses. */
3319 instantiate_decls (fndecl, 0);
3320
3321 /* Indicate that, from now on, assign_stack_local should use
3322 frame_pointer_rtx. */
3323 virtuals_instantiated = 1;
3324 }
3325
3326 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3327 all virtual registers in their DECL_RTL's.
3328
3329 If VALID_ONLY, do this only if the resulting address is still valid.
3330 Otherwise, always do it. */
3331
3332 static void
3333 instantiate_decls (fndecl, valid_only)
3334 tree fndecl;
3335 int valid_only;
3336 {
3337 tree decl;
3338
3339 if (DECL_SAVED_INSNS (fndecl))
3340 /* When compiling an inline function, the obstack used for
3341 rtl allocation is the maybepermanent_obstack. Calling
3342 `resume_temporary_allocation' switches us back to that
3343 obstack while we process this function's parameters. */
3344 resume_temporary_allocation ();
3345
3346 /* Process all parameters of the function. */
3347 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3348 {
3349 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3350
3351 instantiate_decl (DECL_RTL (decl), size, valid_only);
3352
3353 /* If the parameter was promoted, then the incoming RTL mode may be
3354 larger than the declared type size. We must use the larger of
3355 the two sizes. */
3356 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3357 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3358 }
3359
3360 /* Now process all variables defined in the function or its subblocks. */
3361 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3362
3363 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3364 {
3365 /* Save all rtl allocated for this function by raising the
3366 high-water mark on the maybepermanent_obstack. */
3367 preserve_data ();
3368 /* All further rtl allocation is now done in the current_obstack. */
3369 rtl_in_current_obstack ();
3370 }
3371 }
3372
3373 /* Subroutine of instantiate_decls: Process all decls in the given
3374 BLOCK node and all its subblocks. */
3375
3376 static void
3377 instantiate_decls_1 (let, valid_only)
3378 tree let;
3379 int valid_only;
3380 {
3381 tree t;
3382
3383 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3384 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3385 valid_only);
3386
3387 /* Process all subblocks. */
3388 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3389 instantiate_decls_1 (t, valid_only);
3390 }
3391
3392 /* Subroutine of the preceding procedures: Given RTL representing a
3393 decl and the size of the object, do any instantiation required.
3394
3395 If VALID_ONLY is non-zero, it means that the RTL should only be
3396 changed if the new address is valid. */
3397
3398 static void
3399 instantiate_decl (x, size, valid_only)
3400 rtx x;
3401 int size;
3402 int valid_only;
3403 {
3404 enum machine_mode mode;
3405 rtx addr;
3406
3407 /* If this is not a MEM, no need to do anything. Similarly if the
3408 address is a constant or a register that is not a virtual register. */
3409
3410 if (x == 0 || GET_CODE (x) != MEM)
3411 return;
3412
3413 addr = XEXP (x, 0);
3414 if (CONSTANT_P (addr)
3415 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3416 || (GET_CODE (addr) == REG
3417 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3418 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3419 return;
3420
3421 /* If we should only do this if the address is valid, copy the address.
3422 We need to do this so we can undo any changes that might make the
3423 address invalid. This copy is unfortunate, but probably can't be
3424 avoided. */
3425
3426 if (valid_only)
3427 addr = copy_rtx (addr);
3428
3429 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3430
3431 if (valid_only)
3432 {
3433 /* Now verify that the resulting address is valid for every integer or
3434 floating-point mode up to and including SIZE bytes long. We do this
3435 since the object might be accessed in any mode and frame addresses
3436 are shared. */
3437
3438 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3439 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3440 mode = GET_MODE_WIDER_MODE (mode))
3441 if (! memory_address_p (mode, addr))
3442 return;
3443
3444 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3445 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3446 mode = GET_MODE_WIDER_MODE (mode))
3447 if (! memory_address_p (mode, addr))
3448 return;
3449 }
3450
3451 /* Put back the address now that we have updated it and we either know
3452 it is valid or we don't care whether it is valid. */
3453
3454 XEXP (x, 0) = addr;
3455 }
3456 \f
3457 /* Given a pointer to a piece of rtx and an optional pointer to the
3458 containing object, instantiate any virtual registers present in it.
3459
3460 If EXTRA_INSNS, we always do the replacement and generate
3461 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3462 is not valid.
3463
3464 Return 1 if we either had nothing to do or if we were able to do the
3465 needed replacement. Return 0 otherwise; we only return zero if
3466 EXTRA_INSNS is zero.
3467
3468 We first try some simple transformations to avoid the creation of extra
3469 pseudos. */
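/* Illustrative example, not part of the original sources (the offset is
   hypothetical): with var_offset == -16, an address such as

       (plus:SI (reg virtual_stack_vars) (const_int 8))

   is rewritten to

       (plus:SI (reg frame_pointer) (const_int -8))

   by folding the virtual register's offset into the existing constant,
   so in the common case no extra insns are needed.  */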
3470
3471 static int
3472 instantiate_virtual_regs_1 (loc, object, extra_insns)
3473 rtx *loc;
3474 rtx object;
3475 int extra_insns;
3476 {
3477 rtx x;
3478 RTX_CODE code;
3479 rtx new = 0;
3480 HOST_WIDE_INT offset = 0;
3481 rtx temp;
3482 rtx seq;
3483 int i, j;
3484 const char *fmt;
3485
3486 /* Re-start here to avoid recursion in common cases. */
3487 restart:
3488
3489 x = *loc;
3490 if (x == 0)
3491 return 1;
3492
3493 code = GET_CODE (x);
3494
3495 /* Check for some special cases. */
3496 switch (code)
3497 {
3498 case CONST_INT:
3499 case CONST_DOUBLE:
3500 case CONST:
3501 case SYMBOL_REF:
3502 case CODE_LABEL:
3503 case PC:
3504 case CC0:
3505 case ASM_INPUT:
3506 case ADDR_VEC:
3507 case ADDR_DIFF_VEC:
3508 case RETURN:
3509 return 1;
3510
3511 case SET:
3512 /* We are allowed to set the virtual registers. This means that
3513 the actual register should receive the source minus the
3514 appropriate offset. This is used, for example, in the handling
3515 of non-local gotos. */
3516 if (SET_DEST (x) == virtual_incoming_args_rtx)
3517 new = arg_pointer_rtx, offset = - in_arg_offset;
3518 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3519 new = frame_pointer_rtx, offset = - var_offset;
3520 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3521 new = stack_pointer_rtx, offset = - dynamic_offset;
3522 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3523 new = stack_pointer_rtx, offset = - out_arg_offset;
3524 else if (SET_DEST (x) == virtual_cfa_rtx)
3525 new = arg_pointer_rtx, offset = - cfa_offset;
3526
3527 if (new)
3528 {
3529 rtx src = SET_SRC (x);
3530
3531 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3532
3533 /* The only valid sources here are PLUS or REG. Just do
3534 the simplest possible thing to handle them. */
3535 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3536 abort ();
3537
3538 start_sequence ();
3539 if (GET_CODE (src) != REG)
3540 temp = force_operand (src, NULL_RTX);
3541 else
3542 temp = src;
3543 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3544 seq = get_insns ();
3545 end_sequence ();
3546
3547 emit_insns_before (seq, object);
3548 SET_DEST (x) = new;
3549
3550 if (! validate_change (object, &SET_SRC (x), temp, 0)
3551 || ! extra_insns)
3552 abort ();
3553
3554 return 1;
3555 }
3556
3557 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3558 loc = &SET_SRC (x);
3559 goto restart;
3560
3561 case PLUS:
3562 /* Handle special case of virtual register plus constant. */
3563 if (CONSTANT_P (XEXP (x, 1)))
3564 {
3565 rtx old, new_offset;
3566
3567 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3568 if (GET_CODE (XEXP (x, 0)) == PLUS)
3569 {
3570 rtx inner = XEXP (XEXP (x, 0), 0);
3571
3572 if (inner == virtual_incoming_args_rtx)
3573 new = arg_pointer_rtx, offset = in_arg_offset;
3574 else if (inner == virtual_stack_vars_rtx)
3575 new = frame_pointer_rtx, offset = var_offset;
3576 else if (inner == virtual_stack_dynamic_rtx)
3577 new = stack_pointer_rtx, offset = dynamic_offset;
3578 else if (inner == virtual_outgoing_args_rtx)
3579 new = stack_pointer_rtx, offset = out_arg_offset;
3580 else if (inner == virtual_cfa_rtx)
3581 new = arg_pointer_rtx, offset = cfa_offset;
3582 else
3583 {
3584 loc = &XEXP (x, 0);
3585 goto restart;
3586 }
3587
3588 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3589 extra_insns);
3590 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3591 }
3592
3593 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3594 new = arg_pointer_rtx, offset = in_arg_offset;
3595 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3596 new = frame_pointer_rtx, offset = var_offset;
3597 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3598 new = stack_pointer_rtx, offset = dynamic_offset;
3599 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3600 new = stack_pointer_rtx, offset = out_arg_offset;
3601 else if (XEXP (x, 0) == virtual_cfa_rtx)
3602 new = arg_pointer_rtx, offset = cfa_offset;
3603 else
3604 {
3605 /* We know the second operand is a constant. Unless the
3606 first operand is a REG (which has already been checked),
3607 it needs to be checked. */
3608 if (GET_CODE (XEXP (x, 0)) != REG)
3609 {
3610 loc = &XEXP (x, 0);
3611 goto restart;
3612 }
3613 return 1;
3614 }
3615
3616 new_offset = plus_constant (XEXP (x, 1), offset);
3617
3618 /* If the new constant is zero, try to replace the sum with just
3619 the register. */
3620 if (new_offset == const0_rtx
3621 && validate_change (object, loc, new, 0))
3622 return 1;
3623
3624 /* Next try to replace the register and new offset.
3625 There are two changes to validate here and we can't assume that,
3626 when the old offset equals the new one, just changing the register
3627 will yield a valid insn. In the interests of a little efficiency,
3628 however, we only call validate_change once (we don't queue up the
3629 changes and then call apply_change_group). */
3630
3631 old = XEXP (x, 0);
3632 if (offset == 0
3633 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3634 : (XEXP (x, 0) = new,
3635 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3636 {
3637 if (! extra_insns)
3638 {
3639 XEXP (x, 0) = old;
3640 return 0;
3641 }
3642
3643 /* Otherwise copy the new constant into a register and replace
3644 constant with that register. */
3645 temp = gen_reg_rtx (Pmode);
3646 XEXP (x, 0) = new;
3647 if (validate_change (object, &XEXP (x, 1), temp, 0))
3648 emit_insn_before (gen_move_insn (temp, new_offset), object);
3649 else
3650 {
3651 /* If that didn't work, replace this expression with a
3652 register containing the sum. */
3653
3654 XEXP (x, 0) = old;
3655 new = gen_rtx_PLUS (Pmode, new, new_offset);
3656
3657 start_sequence ();
3658 temp = force_operand (new, NULL_RTX);
3659 seq = get_insns ();
3660 end_sequence ();
3661
3662 emit_insns_before (seq, object);
3663 if (! validate_change (object, loc, temp, 0)
3664 && ! validate_replace_rtx (x, temp, object))
3665 abort ();
3666 }
3667 }
3668
3669 return 1;
3670 }
3671
3672 /* Fall through to generic two-operand expression case. */
3673 case EXPR_LIST:
3674 case CALL:
3675 case COMPARE:
3676 case MINUS:
3677 case MULT:
3678 case DIV: case UDIV:
3679 case MOD: case UMOD:
3680 case AND: case IOR: case XOR:
3681 case ROTATERT: case ROTATE:
3682 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3683 case NE: case EQ:
3684 case GE: case GT: case GEU: case GTU:
3685 case LE: case LT: case LEU: case LTU:
3686 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3687 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3688 loc = &XEXP (x, 0);
3689 goto restart;
3690
3691 case MEM:
3692 /* Most cases of MEM that convert to valid addresses have already been
3693 handled by our scan of decls. The only special handling we
3694 need here is to make a copy of the rtx to ensure it isn't being
3695 shared if we have to change it to a pseudo.
3696
3697 If the rtx is a simple reference to an address via a virtual register,
3698 it can potentially be shared. In such cases, first try to make it
3699 a valid address, which can also be shared. Otherwise, copy it and
3700 proceed normally.
3701
3702 First check for common cases that need no processing. These are
3703 usually due to instantiation already being done on a previous instance
3704 of a shared rtx. */
3705
3706 temp = XEXP (x, 0);
3707 if (CONSTANT_ADDRESS_P (temp)
3708 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3709 || temp == arg_pointer_rtx
3710 #endif
3711 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3712 || temp == hard_frame_pointer_rtx
3713 #endif
3714 || temp == frame_pointer_rtx)
3715 return 1;
3716
3717 if (GET_CODE (temp) == PLUS
3718 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3719 && (XEXP (temp, 0) == frame_pointer_rtx
3720 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3721 || XEXP (temp, 0) == hard_frame_pointer_rtx
3722 #endif
3723 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3724 || XEXP (temp, 0) == arg_pointer_rtx
3725 #endif
3726 ))
3727 return 1;
3728
3729 if (temp == virtual_stack_vars_rtx
3730 || temp == virtual_incoming_args_rtx
3731 || (GET_CODE (temp) == PLUS
3732 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3733 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3734 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3735 {
3736 /* This MEM may be shared. If the substitution can be done without
3737 the need to generate new pseudos, we want to do it in place
3738 so all copies of the shared rtx benefit. The call below will
3739 only make substitutions if the resulting address is still
3740 valid.
3741
3742 Note that we cannot pass X as the object in the recursive call
3743 since the insn being processed may not allow all valid
3744 addresses. However, if we were not passed an object, we can
3745 only modify X without copying it if X will have a valid
3746 address.
3747
3748 ??? Also note that this can still lose if OBJECT is an insn that
3749 has fewer restrictions on an address than some other insn.
3750 In that case, we will modify the shared address. This case
3751 doesn't seem very likely, though. One case where this could
3752 happen is in the case of a USE or CLOBBER reference, but we
3753 take care of that below. */
3754
3755 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3756 object ? object : x, 0))
3757 return 1;
3758
3759 /* Otherwise make a copy and process that copy. We copy the entire
3760 RTL expression since it might be a PLUS which could also be
3761 shared. */
3762 *loc = x = copy_rtx (x);
3763 }
3764
3765 /* Fall through to generic unary operation case. */
3766 case SUBREG:
3767 case STRICT_LOW_PART:
3768 case NEG: case NOT:
3769 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3770 case SIGN_EXTEND: case ZERO_EXTEND:
3771 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3772 case FLOAT: case FIX:
3773 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3774 case ABS:
3775 case SQRT:
3776 case FFS:
3777 /* These cases either have just one operand or we know that we need not
3778 check the rest of the operands. */
3779 loc = &XEXP (x, 0);
3780 goto restart;
3781
3782 case USE:
3783 case CLOBBER:
3784 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3785 go ahead and make the invalid one, but do it to a copy. For a REG,
3786 just make the recursive call, since there's no chance of a problem. */
3787
3788 if ((GET_CODE (XEXP (x, 0)) == MEM
3789 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3790 0))
3791 || (GET_CODE (XEXP (x, 0)) == REG
3792 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3793 return 1;
3794
3795 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3796 loc = &XEXP (x, 0);
3797 goto restart;
3798
3799 case REG:
3800 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3801 in front of this insn and substitute the temporary. */
3802 if (x == virtual_incoming_args_rtx)
3803 new = arg_pointer_rtx, offset = in_arg_offset;
3804 else if (x == virtual_stack_vars_rtx)
3805 new = frame_pointer_rtx, offset = var_offset;
3806 else if (x == virtual_stack_dynamic_rtx)
3807 new = stack_pointer_rtx, offset = dynamic_offset;
3808 else if (x == virtual_outgoing_args_rtx)
3809 new = stack_pointer_rtx, offset = out_arg_offset;
3810 else if (x == virtual_cfa_rtx)
3811 new = arg_pointer_rtx, offset = cfa_offset;
3812
3813 if (new)
3814 {
3815 temp = plus_constant (new, offset);
3816 if (!validate_change (object, loc, temp, 0))
3817 {
3818 if (! extra_insns)
3819 return 0;
3820
3821 start_sequence ();
3822 temp = force_operand (temp, NULL_RTX);
3823 seq = get_insns ();
3824 end_sequence ();
3825
3826 emit_insns_before (seq, object);
3827 if (! validate_change (object, loc, temp, 0)
3828 && ! validate_replace_rtx (x, temp, object))
3829 abort ();
3830 }
3831 }
3832
3833 return 1;
3834
3835 case ADDRESSOF:
3836 if (GET_CODE (XEXP (x, 0)) == REG)
3837 return 1;
3838
3839 else if (GET_CODE (XEXP (x, 0)) == MEM)
3840 {
3841 /* If we have a (addressof (mem ..)), do any instantiation inside
3842 since we know we'll be making the inside valid when we finally
3843 remove the ADDRESSOF. */
3844 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3845 return 1;
3846 }
3847 break;
3848
3849 default:
3850 break;
3851 }
3852
3853 /* Scan all subexpressions. */
3854 fmt = GET_RTX_FORMAT (code);
3855 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3856 if (*fmt == 'e')
3857 {
3858 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3859 return 0;
3860 }
3861 else if (*fmt == 'E')
3862 for (j = 0; j < XVECLEN (x, i); j++)
3863 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3864 extra_insns))
3865 return 0;
3866
3867 return 1;
3868 }
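/* A hypothetical example of the PLUS special case above: an address such as
(plus:SI (reg virtual-incoming-args) (const_int 4)) is rewritten in place to
(plus:SI (reg arg-pointer) (const_int 4 + in_arg_offset)) when the result is
still acceptable in OBJECT; if the combined constant happens to be zero, the
bare hard register is tried first. Only when no in-place form is valid, and
EXTRA_INSNS is nonzero, are insns emitted before OBJECT to compute the sum
into a new pseudo. */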
3869 \f
3870 /* Optimization: assuming this function does not receive nonlocal gotos,
3871 delete the handlers for such, as well as the insns to establish
3872 and disestablish them. */
3873
3874 static void
3875 delete_handlers ()
3876 {
3877 rtx insn;
3878 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3879 {
3880 /* Delete the handler by turning off the flag that would
3881 prevent jump_optimize from deleting it.
3882 Also permit deletion of the nonlocal labels themselves
3883 if nothing local refers to them. */
3884 if (GET_CODE (insn) == CODE_LABEL)
3885 {
3886 tree t, last_t;
3887
3888 LABEL_PRESERVE_P (insn) = 0;
3889
3890 /* Remove it from the nonlocal_label list, to avoid confusing
3891 flow. */
3892 for (t = nonlocal_labels, last_t = 0; t;
3893 last_t = t, t = TREE_CHAIN (t))
3894 if (DECL_RTL (TREE_VALUE (t)) == insn)
3895 break;
3896 if (t)
3897 {
3898 if (! last_t)
3899 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3900 else
3901 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3902 }
3903 }
3904 if (GET_CODE (insn) == INSN)
3905 {
3906 int can_delete = 0;
3907 rtx t;
3908 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3909 if (reg_mentioned_p (t, PATTERN (insn)))
3910 {
3911 can_delete = 1;
3912 break;
3913 }
3914 if (can_delete
3915 || (nonlocal_goto_stack_level != 0
3916 && reg_mentioned_p (nonlocal_goto_stack_level,
3917 PATTERN (insn))))
3918 delete_insn (insn);
3919 }
3920 }
3921 }
3922 \f
3923 int
3924 max_parm_reg_num ()
3925 {
3926 return max_parm_reg;
3927 }
3928
3929 /* Return the first insn following those generated by `assign_parms'. */
3930
3931 rtx
3932 get_first_nonparm_insn ()
3933 {
3934 if (last_parm_insn)
3935 return NEXT_INSN (last_parm_insn);
3936 return get_insns ();
3937 }
3938
3939 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3940 Crash if there is none. */
3941
3942 rtx
3943 get_first_block_beg ()
3944 {
3945 register rtx searcher;
3946 register rtx insn = get_first_nonparm_insn ();
3947
3948 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3949 if (GET_CODE (searcher) == NOTE
3950 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3951 return searcher;
3952
3953 abort (); /* Invalid call to this function. (See comments above.) */
3954 return NULL_RTX;
3955 }
3956
3957 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3958 This means a type for which function calls must pass an address to the
3959 function or get an address back from the function.
3960 EXP may be a type node or an expression (whose type is tested). */
3961
3962 int
3963 aggregate_value_p (exp)
3964 tree exp;
3965 {
3966 int i, regno, nregs;
3967 rtx reg;
3968 tree type;
3969 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3970 type = exp;
3971 else
3972 type = TREE_TYPE (exp);
3973
3974 if (RETURN_IN_MEMORY (type))
3975 return 1;
3976 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3977 and thus can't be returned in registers. */
3978 if (TREE_ADDRESSABLE (type))
3979 return 1;
3980 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3981 return 1;
3982 /* Make sure we have suitable call-clobbered regs to return
3983 the value in; if not, we must return it in memory. */
3984 reg = hard_function_value (type, 0, 0);
3985
3986 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3987 it is OK. */
3988 if (GET_CODE (reg) != REG)
3989 return 0;
3990
3991 regno = REGNO (reg);
3992 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3993 for (i = 0; i < nregs; i++)
3994 if (! call_used_regs[regno + i])
3995 return 1;
3996 return 0;
3997 }
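/* For example (purely illustrative): with -fpcc-struct-return in effect,
even a two-byte struct that would otherwise fit in a register is treated
as an aggregate value and returned in memory, because the
flag_pcc_struct_return test above succeeds before the register check is
ever reached. */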
3998 \f
3999 /* Assign RTL expressions to the function's parameters.
4000 This may involve copying them into registers and using
4001 those registers as the RTL for them. */
4002
4003 void
4004 assign_parms (fndecl)
4005 tree fndecl;
4006 {
4007 register tree parm;
4008 register rtx entry_parm = 0;
4009 register rtx stack_parm = 0;
4010 CUMULATIVE_ARGS args_so_far;
4011 enum machine_mode promoted_mode, passed_mode;
4012 enum machine_mode nominal_mode, promoted_nominal_mode;
4013 int unsignedp;
4014 /* Total space needed so far for args on the stack,
4015 given as a constant and a tree-expression. */
4016 struct args_size stack_args_size;
4017 tree fntype = TREE_TYPE (fndecl);
4018 tree fnargs = DECL_ARGUMENTS (fndecl);
4019 /* This is used for the arg pointer when referring to stack args. */
4020 rtx internal_arg_pointer;
4021 /* This is a dummy PARM_DECL that we used for the function result if
4022 the function returns a structure. */
4023 tree function_result_decl = 0;
4024 #ifdef SETUP_INCOMING_VARARGS
4025 int varargs_setup = 0;
4026 #endif
4027 rtx conversion_insns = 0;
4028 struct args_size alignment_pad;
4029
4030 /* Nonzero if the last arg is named `__builtin_va_alist',
4031 which is used on some machines for old-fashioned non-ANSI varargs.h;
4032 this should be stuck onto the stack as if it had arrived there. */
4033 int hide_last_arg
4034 = (current_function_varargs
4035 && fnargs
4036 && (parm = tree_last (fnargs)) != 0
4037 && DECL_NAME (parm)
4038 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4039 "__builtin_va_alist")));
4040
4041 /* Nonzero if function takes extra anonymous args.
4042 This means the last named arg must be on the stack
4043 right before the anonymous ones. */
4044 int stdarg
4045 = (TYPE_ARG_TYPES (fntype) != 0
4046 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4047 != void_type_node));
4048
4049 current_function_stdarg = stdarg;
4050
4051 /* If the reg that the virtual arg pointer will be translated into is
4052 not a fixed reg or is the stack pointer, make a copy of the virtual
4053 arg pointer, and address parms via the copy. The frame pointer is
4054 considered fixed even though it is not marked as such.
4055
4056 The second time through, simply use ap to avoid generating rtx. */
4057
4058 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4059 || ! (fixed_regs[ARG_POINTER_REGNUM]
4060 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4061 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4062 else
4063 internal_arg_pointer = virtual_incoming_args_rtx;
4064 current_function_internal_arg_pointer = internal_arg_pointer;
4065
4066 stack_args_size.constant = 0;
4067 stack_args_size.var = 0;
4068
4069 /* If struct value address is treated as the first argument, make it so. */
4070 if (aggregate_value_p (DECL_RESULT (fndecl))
4071 && ! current_function_returns_pcc_struct
4072 && struct_value_incoming_rtx == 0)
4073 {
4074 tree type = build_pointer_type (TREE_TYPE (fntype));
4075
4076 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4077
4078 DECL_ARG_TYPE (function_result_decl) = type;
4079 TREE_CHAIN (function_result_decl) = fnargs;
4080 fnargs = function_result_decl;
4081 }
4082
4083 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4084 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4085
4086 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4087 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4088 #else
4089 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4090 #endif
4091
4092 /* We haven't yet found an argument that we must push and pretend the
4093 caller did. */
4094 current_function_pretend_args_size = 0;
4095
4096 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4097 {
4098 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4099 struct args_size stack_offset;
4100 struct args_size arg_size;
4101 int passed_pointer = 0;
4102 int did_conversion = 0;
4103 tree passed_type = DECL_ARG_TYPE (parm);
4104 tree nominal_type = TREE_TYPE (parm);
4105 int pretend_named;
4106
4107 /* Set LAST_NAMED if this is last named arg before some
4108 anonymous args. */
4109 int last_named = ((TREE_CHAIN (parm) == 0
4110 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4111 && (stdarg || current_function_varargs));
4112 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4113 most machines, if this is a varargs/stdarg function, then we treat
4114 the last named arg as if it were anonymous too. */
4115 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4116
4117 if (TREE_TYPE (parm) == error_mark_node
4118 /* This can happen after weird syntax errors
4119 or if an enum type is defined among the parms. */
4120 || TREE_CODE (parm) != PARM_DECL
4121 || passed_type == NULL)
4122 {
4123 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4124 = gen_rtx_MEM (BLKmode, const0_rtx);
4125 TREE_USED (parm) = 1;
4126 continue;
4127 }
4128
4129 /* For varargs.h function, save info about regs and stack space
4130 used by the individual args, not including the va_alist arg. */
4131 if (hide_last_arg && last_named)
4132 current_function_args_info = args_so_far;
4133
4134 /* Find mode of arg as it is passed, and mode of arg
4135 as it should be during execution of this function. */
4136 passed_mode = TYPE_MODE (passed_type);
4137 nominal_mode = TYPE_MODE (nominal_type);
4138
4139 /* If the parm's mode is VOID, its value doesn't matter, so avoid the
4140 usual things like emit_move_insn that could crash. */
4141 if (nominal_mode == VOIDmode)
4142 {
4143 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4144 continue;
4145 }
4146
4147 /* If the parm is to be passed as a transparent union, use the
4148 type of the first field for the tests below. We have already
4149 verified that the modes are the same. */
4150 if (DECL_TRANSPARENT_UNION (parm)
4151 || TYPE_TRANSPARENT_UNION (passed_type))
4152 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4153
4154 /* See if this arg was passed by invisible reference. It is if
4155 it is an object whose size depends on the contents of the
4156 object itself or if the machine requires these objects be passed
4157 that way. */
4158
4159 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4160 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4161 || TREE_ADDRESSABLE (passed_type)
4162 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4163 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4164 passed_type, named_arg)
4165 #endif
4166 )
4167 {
4168 passed_type = nominal_type = build_pointer_type (passed_type);
4169 passed_pointer = 1;
4170 passed_mode = nominal_mode = Pmode;
4171 }
4172
4173 promoted_mode = passed_mode;
4174
4175 #ifdef PROMOTE_FUNCTION_ARGS
4176 /* Compute the mode to which the arg is actually extended. */
4177 unsignedp = TREE_UNSIGNED (passed_type);
4178 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4179 #endif
4180
4181 /* Let machine desc say which reg (if any) the parm arrives in.
4182 0 means it arrives on the stack. */
4183 #ifdef FUNCTION_INCOMING_ARG
4184 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4185 passed_type, named_arg);
4186 #else
4187 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4188 passed_type, named_arg);
4189 #endif
4190
4191 if (entry_parm == 0)
4192 promoted_mode = passed_mode;
4193
4194 #ifdef SETUP_INCOMING_VARARGS
4195 /* If this is the last named parameter, do any required setup for
4196 varargs or stdargs. We need to know about the case of this being an
4197 addressable type, in which case we skip the registers it
4198 would have arrived in.
4199
4200 For stdargs, LAST_NAMED will be set for two parameters, the one that
4201 is actually the last named, and the dummy parameter. We only
4202 want to do this action once.
4203
4204 Also, indicate when RTL generation is to be suppressed. */
4205 if (last_named && !varargs_setup)
4206 {
4207 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4208 current_function_pretend_args_size, 0);
4209 varargs_setup = 1;
4210 }
4211 #endif
4212
4213 /* Determine parm's home in the stack,
4214 in case it arrives in the stack or we should pretend it did.
4215
4216 Compute the stack position and rtx where the argument arrives
4217 and its size.
4218
4219 There is one complexity here: If this was a parameter that would
4220 have been passed in registers, but wasn't only because it is
4221 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4222 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4223 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4224 0 as it was the previous time. */
4225
4226 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4227 locate_and_pad_parm (promoted_mode, passed_type,
4228 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4229 1,
4230 #else
4231 #ifdef FUNCTION_INCOMING_ARG
4232 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4233 passed_type,
4234 pretend_named) != 0,
4235 #else
4236 FUNCTION_ARG (args_so_far, promoted_mode,
4237 passed_type,
4238 pretend_named) != 0,
4239 #endif
4240 #endif
4241 fndecl, &stack_args_size, &stack_offset, &arg_size,
4242 &alignment_pad);
4243
4244 {
4245 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4246
4247 if (offset_rtx == const0_rtx)
4248 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4249 else
4250 stack_parm = gen_rtx_MEM (promoted_mode,
4251 gen_rtx_PLUS (Pmode,
4252 internal_arg_pointer,
4253 offset_rtx));
4254
4255 /* If this is a memory ref that contains aggregate components,
4256 mark it as such for cse and loop optimize. Likewise if it
4257 is readonly. */
4258 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4259 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4260 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4261 }
4262
4263 /* If this parameter was passed both in registers and in the stack,
4264 use the copy on the stack. */
4265 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4266 entry_parm = 0;
4267
4268 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4269 /* If this parm was passed part in regs and part in memory,
4270 pretend it arrived entirely in memory
4271 by pushing the register-part onto the stack.
4272
4273 In the special case of a DImode or DFmode that is split,
4274 we could put it together in a pseudoreg directly,
4275 but for now that's not worth bothering with. */
4276
4277 if (entry_parm)
4278 {
4279 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4280 passed_type, named_arg);
4281
4282 if (nregs > 0)
4283 {
4284 current_function_pretend_args_size
4285 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4286 / (PARM_BOUNDARY / BITS_PER_UNIT)
4287 * (PARM_BOUNDARY / BITS_PER_UNIT));
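/* The expression above rounds nregs * UNITS_PER_WORD up to the next
multiple of PARM_BOUNDARY / BITS_PER_UNIT bytes. A hypothetical
example: with UNITS_PER_WORD == 4, PARM_BOUNDARY == 64 (8 bytes) and
nregs == 3, the 12 bytes of register-passed data become
((12 + 8 - 1) / 8) * 8 == 16 bytes of pretended arguments. */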
4288
4289 /* Handle calls that pass values in multiple non-contiguous
4290 locations. The Irix 6 ABI has examples of this. */
4291 if (GET_CODE (entry_parm) == PARALLEL)
4292 emit_group_store (validize_mem (stack_parm), entry_parm,
4293 int_size_in_bytes (TREE_TYPE (parm)),
4294 (TYPE_ALIGN (TREE_TYPE (parm))
4295 / BITS_PER_UNIT));
4296 else
4297 move_block_from_reg (REGNO (entry_parm),
4298 validize_mem (stack_parm), nregs,
4299 int_size_in_bytes (TREE_TYPE (parm)));
4300
4301 entry_parm = stack_parm;
4302 }
4303 }
4304 #endif
4305
4306 /* If we didn't decide this parm came in a register,
4307 by default it came on the stack. */
4308 if (entry_parm == 0)
4309 entry_parm = stack_parm;
4310
4311 /* Record permanently how this parm was passed. */
4312 DECL_INCOMING_RTL (parm) = entry_parm;
4313
4314 /* If there is actually space on the stack for this parm,
4315 count it in stack_args_size; otherwise set stack_parm to 0
4316 to indicate there is no preallocated stack slot for the parm. */
4317
4318 if (entry_parm == stack_parm
4319 || (GET_CODE (entry_parm) == PARALLEL
4320 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4321 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4322 /* On some machines, even if a parm value arrives in a register
4323 there is still an (uninitialized) stack slot allocated for it.
4324
4325 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4326 whether this parameter already has a stack slot allocated,
4327 because an arg block exists only if current_function_args_size
4328 is larger than some threshold, and we haven't calculated that
4329 yet. So, for now, we just assume that stack slots never exist
4330 in this case. */
4331 || REG_PARM_STACK_SPACE (fndecl) > 0
4332 #endif
4333 )
4334 {
4335 stack_args_size.constant += arg_size.constant;
4336 if (arg_size.var)
4337 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4338 }
4339 else
4340 /* No stack slot was pushed for this parm. */
4341 stack_parm = 0;
4342
4343 /* Update info on where next arg arrives in registers. */
4344
4345 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4346 passed_type, named_arg);
4347
4348 /* If we can't trust the parm stack slot to be aligned enough
4349 for its ultimate type, don't use that slot after entry.
4350 We'll make another stack slot, if we need one. */
4351 {
4352 unsigned int thisparm_boundary
4353 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4354
4355 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4356 stack_parm = 0;
4357 }
4358
4359 /* If parm was passed in memory, and we need to convert it on entry,
4360 don't store it back in that same slot. */
4361 if (entry_parm != 0
4362 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4363 stack_parm = 0;
4364
4365 #if 0
4366 /* Now adjust STACK_PARM to the mode and precise location
4367 where this parameter should live during execution,
4368 if we discover that it must live in the stack during execution.
4369 To make debuggers happier on big-endian machines, we store
4370 the value in the last bytes of the space available. */
4371
4372 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4373 && stack_parm != 0)
4374 {
4375 rtx offset_rtx;
4376
4377 if (BYTES_BIG_ENDIAN
4378 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4379 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4380 - GET_MODE_SIZE (nominal_mode));
4381
4382 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4383 if (offset_rtx == const0_rtx)
4384 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4385 else
4386 stack_parm = gen_rtx_MEM (nominal_mode,
4387 gen_rtx_PLUS (Pmode,
4388 internal_arg_pointer,
4389 offset_rtx));
4390
4391 /* If this is a memory ref that contains aggregate components,
4392 mark it as such for cse and loop optimize. */
4393 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4394 }
4395 #endif /* 0 */
4396
4397 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4398 in the mode in which it arrives.
4399 STACK_PARM is an RTX for a stack slot where the parameter can live
4400 during the function (in case we want to put it there).
4401 STACK_PARM is 0 if no stack slot was pushed for it.
4402
4403 Now output code if necessary to convert ENTRY_PARM to
4404 the type in which this function declares it,
4405 and store that result in an appropriate place,
4406 which may be a pseudo reg, may be STACK_PARM,
4407 or may be a local stack slot if STACK_PARM is 0.
4408
4409 Set DECL_RTL to that place. */
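/* Illustrative summary of the cases handled below (not exhaustive): a
BLKmode parm, or one arriving as a PARALLEL, ends up with a stack slot as
its DECL_RTL; a scalar that may live in a register is copied into a fresh
pseudo which becomes its DECL_RTL; otherwise the value is stored into
STACK_PARM, allocating a slot first if none was pushed. */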
4410
4411 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4412 {
4413 /* If a BLKmode parm arrives in registers, copy it to a stack slot.
4414 Handle calls that pass values in multiple non-contiguous
4415 locations. The Irix 6 ABI has examples of this. */
4416 if (GET_CODE (entry_parm) == REG
4417 || GET_CODE (entry_parm) == PARALLEL)
4418 {
4419 int size_stored
4420 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4421 UNITS_PER_WORD);
4422
4423 /* Note that we will be storing an integral number of words.
4424 So we have to be careful to ensure that we allocate an
4425 integral number of words. We do this below in the
4426 assign_stack_local if space was not allocated in the argument
4427 list. If it was, this will not work if PARM_BOUNDARY is not
4428 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4429 if it becomes a problem. */
4430
4431 if (stack_parm == 0)
4432 {
4433 stack_parm
4434 = assign_stack_local (GET_MODE (entry_parm),
4435 size_stored, 0);
4436
4437 /* If this is a memory ref that contains aggregate
4438 components, mark it as such for cse and loop optimize. */
4439 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4440 }
4441
4442 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4443 abort ();
4444
4445 if (TREE_READONLY (parm))
4446 RTX_UNCHANGING_P (stack_parm) = 1;
4447
4448 /* Handle calls that pass values in multiple non-contiguous
4449 locations. The Irix 6 ABI has examples of this. */
4450 if (GET_CODE (entry_parm) == PARALLEL)
4451 emit_group_store (validize_mem (stack_parm), entry_parm,
4452 int_size_in_bytes (TREE_TYPE (parm)),
4453 (TYPE_ALIGN (TREE_TYPE (parm))
4454 / BITS_PER_UNIT));
4455 else
4456 move_block_from_reg (REGNO (entry_parm),
4457 validize_mem (stack_parm),
4458 size_stored / UNITS_PER_WORD,
4459 int_size_in_bytes (TREE_TYPE (parm)));
4460 }
4461 DECL_RTL (parm) = stack_parm;
4462 }
4463 else if (! ((! optimize
4464 && ! DECL_REGISTER (parm)
4465 && ! DECL_INLINE (fndecl))
4466 /* layout_decl may set this. */
4467 || TREE_ADDRESSABLE (parm)
4468 || TREE_SIDE_EFFECTS (parm)
4469 /* If -ffloat-store specified, don't put explicit
4470 float variables into registers. */
4471 || (flag_float_store
4472 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4473 /* Always assign pseudo to structure return or item passed
4474 by invisible reference. */
4475 || passed_pointer || parm == function_result_decl)
4476 {
4477 /* Store the parm in a pseudoregister during the function, but we
4478 may need to do it in a wider mode. */
4479
4480 register rtx parmreg;
4481 int regno, regnoi = 0, regnor = 0;
4482
4483 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4484
4485 promoted_nominal_mode
4486 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4487
4488 parmreg = gen_reg_rtx (promoted_nominal_mode);
4489 mark_user_reg (parmreg);
4490
4491 /* If this was an item that we received a pointer to, set DECL_RTL
4492 appropriately. */
4493 if (passed_pointer)
4494 {
4495 DECL_RTL (parm)
4496 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4497 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4498 }
4499 else
4500 DECL_RTL (parm) = parmreg;
4501
4502 /* Copy the value into the register. */
4503 if (nominal_mode != passed_mode
4504 || promoted_nominal_mode != promoted_mode)
4505 {
4506 int save_tree_used;
4507 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4508 mode, by the caller. We now have to convert it to
4509 NOMINAL_MODE, if different. However, PARMREG may be in
4510 a different mode than NOMINAL_MODE if it is being stored
4511 promoted.
4512
4513 If ENTRY_PARM is a hard register, it might be in a register
4514 not valid for operating in its mode (e.g., an odd-numbered
4515 register for a DFmode). In that case, moves are the only
4516 thing valid, so we can't do a convert from there. This
4517 occurs when the calling sequence allows such misaligned
4518 usages.
4519
4520 In addition, the conversion may involve a call, which could
4521 clobber parameters which haven't been copied to pseudo
4522 registers yet. Therefore, we must first copy the parm to
4523 a pseudo reg here, and save the conversion until after all
4524 parameters have been moved. */
4525
4526 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4527
4528 emit_move_insn (tempreg, validize_mem (entry_parm));
4529
4530 push_to_sequence (conversion_insns);
4531 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4532
4533 /* TREE_USED gets set erroneously during expand_assignment. */
4534 save_tree_used = TREE_USED (parm);
4535 expand_assignment (parm,
4536 make_tree (nominal_type, tempreg), 0, 0);
4537 TREE_USED (parm) = save_tree_used;
4538 conversion_insns = get_insns ();
4539 did_conversion = 1;
4540 end_sequence ();
4541 }
4542 else
4543 emit_move_insn (parmreg, validize_mem (entry_parm));
4544
4545 /* If we were passed a pointer but the actual value
4546 can safely live in a register, put it in one. */
4547 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4548 && ! ((! optimize
4549 && ! DECL_REGISTER (parm)
4550 && ! DECL_INLINE (fndecl))
4551 /* layout_decl may set this. */
4552 || TREE_ADDRESSABLE (parm)
4553 || TREE_SIDE_EFFECTS (parm)
4554 /* If -ffloat-store specified, don't put explicit
4555 float variables into registers. */
4556 || (flag_float_store
4557 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4558 {
4559 /* We can't use nominal_mode, because it will have been set to
4560 Pmode above. We must use the actual mode of the parm. */
4561 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4562 mark_user_reg (parmreg);
4563 emit_move_insn (parmreg, DECL_RTL (parm));
4564 DECL_RTL (parm) = parmreg;
4565 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4566 now the parm. */
4567 stack_parm = 0;
4568 }
4569 #ifdef FUNCTION_ARG_CALLEE_COPIES
4570 /* If we are passed an arg by reference and it is our responsibility
4571 to make a copy, do it now.
4572 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4573 original argument, so we must recreate them in the call to
4574 FUNCTION_ARG_CALLEE_COPIES. */
4575 /* ??? Later add code to avoid the copy if the argument isn't
4576 modified. */
4577
4578 else if (passed_pointer
4579 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4580 TYPE_MODE (DECL_ARG_TYPE (parm)),
4581 DECL_ARG_TYPE (parm),
4582 named_arg)
4583 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4584 {
4585 rtx copy;
4586 tree type = DECL_ARG_TYPE (parm);
4587
4588 /* This sequence may involve a library call perhaps clobbering
4589 registers that haven't been copied to pseudos yet. */
4590
4591 push_to_sequence (conversion_insns);
4592
4593 if (TYPE_SIZE (type) == 0
4594 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4595 /* This is a variable sized object. */
4596 copy = gen_rtx_MEM (BLKmode,
4597 allocate_dynamic_stack_space
4598 (expr_size (parm), NULL_RTX,
4599 TYPE_ALIGN (type)));
4600 else
4601 copy = assign_stack_temp (TYPE_MODE (type),
4602 int_size_in_bytes (type), 1);
4603 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4604 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4605
4606 store_expr (parm, copy, 0);
4607 emit_move_insn (parmreg, XEXP (copy, 0));
4608 if (current_function_check_memory_usage)
4609 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4610 XEXP (copy, 0), Pmode,
4611 GEN_INT (int_size_in_bytes (type)),
4612 TYPE_MODE (sizetype),
4613 GEN_INT (MEMORY_USE_RW),
4614 TYPE_MODE (integer_type_node));
4615 conversion_insns = get_insns ();
4616 did_conversion = 1;
4617 end_sequence ();
4618 }
4619 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4620
4621 /* In any case, record the parm's desired stack location
4622 in case we later discover it must live in the stack.
4623
4624 If it is a COMPLEX value, store the stack location for both
4625 halves. */
4626
4627 if (GET_CODE (parmreg) == CONCAT)
4628 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4629 else
4630 regno = REGNO (parmreg);
4631
4632 if (regno >= max_parm_reg)
4633 {
4634 rtx *new;
4635 int old_max_parm_reg = max_parm_reg;
4636
4637 /* It's slow to expand this one register at a time,
4638 but it's also rare and we need max_parm_reg to be
4639 precisely correct. */
4640 max_parm_reg = regno + 1;
4641 new = (rtx *) xrealloc (parm_reg_stack_loc,
4642 max_parm_reg * sizeof (rtx));
4643 bzero ((char *) (new + old_max_parm_reg),
4644 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4645 parm_reg_stack_loc = new;
4646 }
4647
4648 if (GET_CODE (parmreg) == CONCAT)
4649 {
4650 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4651
4652 regnor = REGNO (gen_realpart (submode, parmreg));
4653 regnoi = REGNO (gen_imagpart (submode, parmreg));
4654
4655 if (stack_parm != 0)
4656 {
4657 parm_reg_stack_loc[regnor]
4658 = gen_realpart (submode, stack_parm);
4659 parm_reg_stack_loc[regnoi]
4660 = gen_imagpart (submode, stack_parm);
4661 }
4662 else
4663 {
4664 parm_reg_stack_loc[regnor] = 0;
4665 parm_reg_stack_loc[regnoi] = 0;
4666 }
4667 }
4668 else
4669 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4670
4671 /* Mark the register as eliminable if we did no conversion
4672 and it was copied from memory at a fixed offset,
4673 and the arg pointer was not copied to a pseudo-reg.
4674 If the arg pointer is a pseudo reg or the offset formed
4675 an invalid address, such memory-equivalences
4676 as we make here would screw up life analysis for it. */
4677 if (nominal_mode == passed_mode
4678 && ! did_conversion
4679 && stack_parm != 0
4680 && GET_CODE (stack_parm) == MEM
4681 && stack_offset.var == 0
4682 && reg_mentioned_p (virtual_incoming_args_rtx,
4683 XEXP (stack_parm, 0)))
4684 {
4685 rtx linsn = get_last_insn ();
4686 rtx sinsn, set;
4687
4688 /* Mark complex types separately. */
4689 if (GET_CODE (parmreg) == CONCAT)
4690 /* Scan backwards for the set of the real and
4691 imaginary parts. */
4692 for (sinsn = linsn; sinsn != 0;
4693 sinsn = prev_nonnote_insn (sinsn))
4694 {
4695 set = single_set (sinsn);
4696 if (set != 0
4697 && SET_DEST (set) == regno_reg_rtx [regnoi])
4698 REG_NOTES (sinsn)
4699 = gen_rtx_EXPR_LIST (REG_EQUIV,
4700 parm_reg_stack_loc[regnoi],
4701 REG_NOTES (sinsn));
4702 else if (set != 0
4703 && SET_DEST (set) == regno_reg_rtx [regnor])
4704 REG_NOTES (sinsn)
4705 = gen_rtx_EXPR_LIST (REG_EQUIV,
4706 parm_reg_stack_loc[regnor],
4707 REG_NOTES (sinsn));
4708 }
4709 else if ((set = single_set (linsn)) != 0
4710 && SET_DEST (set) == parmreg)
4711 REG_NOTES (linsn)
4712 = gen_rtx_EXPR_LIST (REG_EQUIV,
4713 stack_parm, REG_NOTES (linsn));
4714 }
4715
4716 /* For pointer data type, suggest pointer register. */
4717 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4718 mark_reg_pointer (parmreg,
4719 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4720 / BITS_PER_UNIT));
4721 }
4722 else
4723 {
4724 /* Value must be stored in the stack slot STACK_PARM
4725 during function execution. */
4726
4727 if (promoted_mode != nominal_mode)
4728 {
4729 /* Conversion is required. */
4730 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4731
4732 emit_move_insn (tempreg, validize_mem (entry_parm));
4733
4734 push_to_sequence (conversion_insns);
4735 entry_parm = convert_to_mode (nominal_mode, tempreg,
4736 TREE_UNSIGNED (TREE_TYPE (parm)));
4737 if (stack_parm)
4738 {
4739 /* ??? This may need a big-endian conversion on sparc64. */
4740 stack_parm = change_address (stack_parm, nominal_mode,
4741 NULL_RTX);
4742 }
4743 conversion_insns = get_insns ();
4744 did_conversion = 1;
4745 end_sequence ();
4746 }
4747
4748 if (entry_parm != stack_parm)
4749 {
4750 if (stack_parm == 0)
4751 {
4752 stack_parm
4753 = assign_stack_local (GET_MODE (entry_parm),
4754 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4755 /* If this is a memory ref that contains aggregate components,
4756 mark it as such for cse and loop optimize. */
4757 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4758 }
4759
4760 if (promoted_mode != nominal_mode)
4761 {
4762 push_to_sequence (conversion_insns);
4763 emit_move_insn (validize_mem (stack_parm),
4764 validize_mem (entry_parm));
4765 conversion_insns = get_insns ();
4766 end_sequence ();
4767 }
4768 else
4769 emit_move_insn (validize_mem (stack_parm),
4770 validize_mem (entry_parm));
4771 }
4772 if (current_function_check_memory_usage)
4773 {
4774 push_to_sequence (conversion_insns);
4775 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4776 XEXP (stack_parm, 0), Pmode,
4777 GEN_INT (GET_MODE_SIZE (GET_MODE
4778 (entry_parm))),
4779 TYPE_MODE (sizetype),
4780 GEN_INT (MEMORY_USE_RW),
4781 TYPE_MODE (integer_type_node));
4782
4783 conversion_insns = get_insns ();
4784 end_sequence ();
4785 }
4786 DECL_RTL (parm) = stack_parm;
4787 }
4788
4789 /* If this "parameter" was the place where we are receiving the
4790 function's incoming structure pointer, set up the result. */
4791 if (parm == function_result_decl)
4792 {
4793 tree result = DECL_RESULT (fndecl);
4794 tree restype = TREE_TYPE (result);
4795
4796 DECL_RTL (result)
4797 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4798
4799 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4800 AGGREGATE_TYPE_P (restype));
4801 }
4802
4803 if (TREE_THIS_VOLATILE (parm))
4804 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4805 if (TREE_READONLY (parm))
4806 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4807 }
4808
4809 /* Output all parameter conversion instructions (possibly including calls)
4810 now that all parameters have been copied out of hard registers. */
4811 emit_insns (conversion_insns);
4812
4813 last_parm_insn = get_last_insn ();
4814
4815 current_function_args_size = stack_args_size.constant;
4816
4817 /* Adjust function incoming argument size for alignment and
4818 minimum length. */
4819
4820 #ifdef REG_PARM_STACK_SPACE
4821 #ifndef MAYBE_REG_PARM_STACK_SPACE
4822 current_function_args_size = MAX (current_function_args_size,
4823 REG_PARM_STACK_SPACE (fndecl));
4824 #endif
4825 #endif
4826
4827 #ifdef STACK_BOUNDARY
4828 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4829
4830 current_function_args_size
4831 = ((current_function_args_size + STACK_BYTES - 1)
4832 / STACK_BYTES) * STACK_BYTES;
4833 #endif
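/* A hypothetical example of the rounding above: with STACK_BOUNDARY == 64,
STACK_BYTES is 8, so an argument size of 20 bytes is rounded up to
((20 + 8 - 1) / 8) * 8 == 24. */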
4834
4835 #ifdef ARGS_GROW_DOWNWARD
4836 current_function_arg_offset_rtx
4837 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4838 : expand_expr (size_diffop (stack_args_size.var,
4839 size_int (-stack_args_size.constant)),
4840 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4841 #else
4842 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4843 #endif
4844
4845 /* See how many bytes, if any, of its args a function should try to pop
4846 on return. */
4847
4848 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4849 current_function_args_size);
4850
4851 /* For stdarg.h function, save info about
4852 regs and stack space used by the named args. */
4853
4854 if (!hide_last_arg)
4855 current_function_args_info = args_so_far;
4856
4857 /* Set the rtx used for the function return value. Put this in its
4858 own variable so any optimizers that need this information don't have
4859 to include tree.h. Do this here so it gets done when an inlined
4860 function gets output. */
4861
4862 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4863 }
4864 \f
4865 /* Indicate whether REGNO is an incoming argument to the current function
4866 that was promoted to a wider mode. If so, return the RTX for the
4867 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4868 that REGNO is promoted from and whether the promotion was signed or
4869 unsigned. */
4870
4871 #ifdef PROMOTE_FUNCTION_ARGS
4872
4873 rtx
4874 promoted_input_arg (regno, pmode, punsignedp)
4875 int regno;
4876 enum machine_mode *pmode;
4877 int *punsignedp;
4878 {
4879 tree arg;
4880
4881 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4882 arg = TREE_CHAIN (arg))
4883 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4884 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4885 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4886 {
4887 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4888 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4889
4890 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4891 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4892 && mode != DECL_MODE (arg))
4893 {
4894 *pmode = DECL_MODE (arg);
4895 *punsignedp = unsignedp;
4896 return DECL_INCOMING_RTL (arg);
4897 }
4898 }
4899
4900 return 0;
4901 }
4902
4903 #endif
4904 \f
4905 /* Compute the size and offset from the start of the stacked arguments for a
4906 parm passed in mode PASSED_MODE and with type TYPE.
4907
4908 INITIAL_OFFSET_PTR points to the current offset into the stacked
4909 arguments.
4910
4911 The starting offset and size for this parm are returned in *OFFSET_PTR
4912 and *ARG_SIZE_PTR, respectively.
4913
4914 IN_REGS is non-zero if the argument will be passed in registers. It will
4915 never be set if REG_PARM_STACK_SPACE is not defined.
4916
4917 FNDECL is the function in which the argument was defined.
4918
4919 There are two types of rounding that are done. The first, controlled by
4920 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4921 list to be aligned to the specific boundary (in bits). This rounding
4922 affects the initial and starting offsets, but not the argument size.
4923
4924 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4925 optionally rounds the size of the parm to PARM_BOUNDARY. The
4926 initial offset is not affected by this rounding, while the size always
4927 is and the starting offset may be. */
4928
4929 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4930 initial_offset_ptr is positive because locate_and_pad_parm's
4931 callers pass in the total size of args so far as
4932 initial_offset_ptr. arg_size_ptr is always positive. */
4933
4934 void
4935 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4936 initial_offset_ptr, offset_ptr, arg_size_ptr,
4937 alignment_pad)
4938 enum machine_mode passed_mode;
4939 tree type;
4940 int in_regs ATTRIBUTE_UNUSED;
4941 tree fndecl ATTRIBUTE_UNUSED;
4942 struct args_size *initial_offset_ptr;
4943 struct args_size *offset_ptr;
4944 struct args_size *arg_size_ptr;
4945 struct args_size *alignment_pad;
4946
4947 {
4948 tree sizetree
4949 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4950 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4951 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4952
4953 #ifdef REG_PARM_STACK_SPACE
4954 /* If we have found a stack parm before we reach the end of the
4955 area reserved for registers, skip that area. */
4956 if (! in_regs)
4957 {
4958 int reg_parm_stack_space = 0;
4959
4960 #ifdef MAYBE_REG_PARM_STACK_SPACE
4961 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4962 #else
4963 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4964 #endif
4965 if (reg_parm_stack_space > 0)
4966 {
4967 if (initial_offset_ptr->var)
4968 {
4969 initial_offset_ptr->var
4970 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4971 ssize_int (reg_parm_stack_space));
4972 initial_offset_ptr->constant = 0;
4973 }
4974 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4975 initial_offset_ptr->constant = reg_parm_stack_space;
4976 }
4977 }
4978 #endif /* REG_PARM_STACK_SPACE */
4979
4980 arg_size_ptr->var = 0;
4981 arg_size_ptr->constant = 0;
4982
4983 #ifdef ARGS_GROW_DOWNWARD
4984 if (initial_offset_ptr->var)
4985 {
4986 offset_ptr->constant = 0;
4987 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
4988 initial_offset_ptr->var);
4989 }
4990 else
4991 {
4992 offset_ptr->constant = - initial_offset_ptr->constant;
4993 offset_ptr->var = 0;
4994 }
4995 if (where_pad != none
4996 && (TREE_CODE (sizetree) != INTEGER_CST
4997 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4998 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4999 SUB_PARM_SIZE (*offset_ptr, sizetree);
5000 if (where_pad != downward)
5001 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5002 if (initial_offset_ptr->var)
5003 arg_size_ptr->var = size_binop (MINUS_EXPR,
5004 size_binop (MINUS_EXPR,
5005 ssize_int (0),
5006 initial_offset_ptr->var),
5007 offset_ptr->var);
5008
5009 else
5010 arg_size_ptr->constant = (- initial_offset_ptr->constant
5011 - offset_ptr->constant);
5012
5013 #else /* !ARGS_GROW_DOWNWARD */
5014 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5015 *offset_ptr = *initial_offset_ptr;
5016
5017 #ifdef PUSH_ROUNDING
5018 if (passed_mode != BLKmode)
5019 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5020 #endif
5021
5022 /* Pad_below needs the pre-rounded size to know how much to pad below
5023 so this must be done before rounding up. */
5024 if (where_pad == downward
5025 /* However, BLKmode args passed in regs have their padding done elsewhere.
5026 The stack slot must be able to hold the entire register. */
5027 && !(in_regs && passed_mode == BLKmode))
5028 pad_below (offset_ptr, passed_mode, sizetree);
5029
5030 if (where_pad != none
5031 && (TREE_CODE (sizetree) != INTEGER_CST
5032 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5033 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5034
5035 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5036 #endif /* ARGS_GROW_DOWNWARD */
5037 }
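/* A hypothetical example, assuming args grow upward, no PUSH_ROUNDING, and
upward padding: for a 6-byte parm with FUNCTION_ARG_BOUNDARY of 64 bits and
PARM_BOUNDARY of 32 bits, an incoming offset of 20 is first padded up to 24
(the starting offset returned in *OFFSET_PTR), and the size is rounded up
from 6 to 8 bytes in *ARG_SIZE_PTR, so the next parm would start at
offset 32. */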
5038
5039 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5040 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5041
5042 static void
5043 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5044 struct args_size *offset_ptr;
5045 int boundary;
5046 struct args_size *alignment_pad;
5047 {
5048 tree save_var = NULL_TREE;
5049 HOST_WIDE_INT save_constant = 0;
5050
5051 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5052
5053 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5054 {
5055 save_var = offset_ptr->var;
5056 save_constant = offset_ptr->constant;
5057 }
5058
5059 alignment_pad->var = NULL_TREE;
5060 alignment_pad->constant = 0;
5061
5062 if (boundary > BITS_PER_UNIT)
5063 {
5064 if (offset_ptr->var)
5065 {
5066 offset_ptr->var =
5067 #ifdef ARGS_GROW_DOWNWARD
5068 round_down
5069 #else
5070 round_up
5071 #endif
5072 (ARGS_SIZE_TREE (*offset_ptr),
5073 boundary / BITS_PER_UNIT);
5074 offset_ptr->constant = 0; /*?*/
5075 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5076 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5077 save_var);
5078 }
5079 else
5080 {
5081 offset_ptr->constant =
5082 #ifdef ARGS_GROW_DOWNWARD
5083 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5084 #else
5085 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5086 #endif
5087 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5088 alignment_pad->constant = offset_ptr->constant - save_constant;
5089 }
5090 }
5091 }
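/* For instance (values hypothetical): with BOUNDARY == 64 bits,
boundary_in_bytes is 8, so a constant offset of 20 becomes
CEIL_ROUND (20, 8) == 24 when arguments grow upward, or
FLOOR_ROUND (20, 8) == 16 when ARGS_GROW_DOWNWARD is defined. */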
5092
5093 #ifndef ARGS_GROW_DOWNWARD
5094 static void
5095 pad_below (offset_ptr, passed_mode, sizetree)
5096 struct args_size *offset_ptr;
5097 enum machine_mode passed_mode;
5098 tree sizetree;
5099 {
5100 if (passed_mode != BLKmode)
5101 {
5102 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5103 offset_ptr->constant
5104 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5105 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5106 - GET_MODE_SIZE (passed_mode));
5107 }
5108 else
5109 {
5110 if (TREE_CODE (sizetree) != INTEGER_CST
5111 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5112 {
5113 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5114 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5115 /* Add it in. */
5116 ADD_PARM_SIZE (*offset_ptr, s2);
5117 SUB_PARM_SIZE (*offset_ptr, sizetree);
5118 }
5119 }
5120 }
5121 #endif
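/* A hypothetical pad_below example: an HImode parm (16 bits, 2 bytes) with
PARM_BOUNDARY == 32 occupies a 4-byte slot, so the offset is advanced by
4 - 2 == 2 bytes of padding below the value. */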
5122 \f
5123 /* Walk the tree of blocks describing the binding levels within a function
5124 and warn about uninitialized variables.
5125 This is done after calling flow_analysis and before global_alloc
5126 clobbers the pseudo-regs to hard regs. */
5127
5128 void
5129 uninitialized_vars_warning (block)
5130 tree block;
5131 {
5132 register tree decl, sub;
5133 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5134 {
5135 if (warn_uninitialized
5136 && TREE_CODE (decl) == VAR_DECL
5137 /* These warnings are unreliable for aggregates
5138 because assigning the fields one by one can fail to convince
5139 flow.c that the entire aggregate was initialized.
5140 Unions are troublesome because members may be shorter. */
5141 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5142 && DECL_RTL (decl) != 0
5143 && GET_CODE (DECL_RTL (decl)) == REG
5144 /* Global optimizations can make it difficult to determine if a
5145 particular variable has been initialized. However, a VAR_DECL
5146 with a nonzero DECL_INITIAL had an initializer, so do not
5147 claim it is potentially uninitialized.
5148
5149 We do not care about the actual value in DECL_INITIAL, so we do
5150 not worry that it may be a dangling pointer. */
5151 && DECL_INITIAL (decl) == NULL_TREE
5152 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5153 warning_with_decl (decl,
5154 "`%s' might be used uninitialized in this function");
5155 if (extra_warnings
5156 && TREE_CODE (decl) == VAR_DECL
5157 && DECL_RTL (decl) != 0
5158 && GET_CODE (DECL_RTL (decl)) == REG
5159 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5160 warning_with_decl (decl,
5161 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5162 }
5163 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5164 uninitialized_vars_warning (sub);
5165 }
5166
5167 /* Do the appropriate part of uninitialized_vars_warning
5168 but for arguments instead of local variables. */
5169
5170 void
5171 setjmp_args_warning ()
5172 {
5173 register tree decl;
5174 for (decl = DECL_ARGUMENTS (current_function_decl);
5175 decl; decl = TREE_CHAIN (decl))
5176 if (DECL_RTL (decl) != 0
5177 && GET_CODE (DECL_RTL (decl)) == REG
5178 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5179 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5180 }
5181
5182 /* If this function calls setjmp, put all vars into the stack
5183 unless they were declared `register'. */
5184
5185 void
5186 setjmp_protect (block)
5187 tree block;
5188 {
5189 register tree decl, sub;
5190 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5191 if ((TREE_CODE (decl) == VAR_DECL
5192 || TREE_CODE (decl) == PARM_DECL)
5193 && DECL_RTL (decl) != 0
5194 && (GET_CODE (DECL_RTL (decl)) == REG
5195 || (GET_CODE (DECL_RTL (decl)) == MEM
5196 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5197 /* If this variable came from an inline function, it must be
5198 that its life doesn't overlap the setjmp. If there was a
5199 setjmp in the function, it would already be in memory. We
5200 must exclude such variables because their DECL_RTL might be
5201 set to strange things such as virtual_stack_vars_rtx. */
5202 && ! DECL_FROM_INLINE (decl)
5203 && (
5204 #ifdef NON_SAVING_SETJMP
5205 /* If longjmp doesn't restore the registers,
5206 don't put anything in them. */
5207 NON_SAVING_SETJMP
5208 ||
5209 #endif
5210 ! DECL_REGISTER (decl)))
5211 put_var_into_stack (decl);
5212 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5213 setjmp_protect (sub);
5214 }
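/* Illustration: given source along the lines of

	if (setjmp (buf) == 0)
	  {
	    n = 1;
	    longjmp (buf, 1);
	  }
	use (n);

   a value of N that lives only in a register can revert to its
   setjmp-time value (or simply be lost) when longjmp restores the
   registers saved at the setjmp, so forcing N into a stack slot makes
   it survive.  Variables the user declared `register' are deliberately
   left alone, unless NON_SAVING_SETJMP says longjmp fails to restore
   registers at all, in which case they go to the stack too.  The
   variable N above is purely for illustration.  */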
5215 \f
5216 /* Like the previous function, but for args instead of local variables. */
5217
5218 void
5219 setjmp_protect_args ()
5220 {
5221 register tree decl;
5222 for (decl = DECL_ARGUMENTS (current_function_decl);
5223 decl; decl = TREE_CHAIN (decl))
5224 if ((TREE_CODE (decl) == VAR_DECL
5225 || TREE_CODE (decl) == PARM_DECL)
5226 && DECL_RTL (decl) != 0
5227 && (GET_CODE (DECL_RTL (decl)) == REG
5228 || (GET_CODE (DECL_RTL (decl)) == MEM
5229 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5230 && (
5231 /* If longjmp doesn't restore the registers,
5232 don't put anything in them. */
5233 #ifdef NON_SAVING_SETJMP
5234 NON_SAVING_SETJMP
5235 ||
5236 #endif
5237 ! DECL_REGISTER (decl)))
5238 put_var_into_stack (decl);
5239 }
5240 \f
5241 /* Return the context-pointer register corresponding to DECL,
5242 or 0 if it does not need one. */
5243
5244 rtx
5245 lookup_static_chain (decl)
5246 tree decl;
5247 {
5248 tree context = decl_function_context (decl);
5249 tree link;
5250
5251 if (context == 0
5252 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5253 return 0;
5254
5255 /* We treat inline_function_decl as an alias for the current function
5256 because that is the inline function whose vars, types, etc.
5257 are being merged into the current function.
5258 See expand_inline_function. */
5259 if (context == current_function_decl || context == inline_function_decl)
5260 return virtual_stack_vars_rtx;
5261
5262 for (link = context_display; link; link = TREE_CHAIN (link))
5263 if (TREE_PURPOSE (link) == context)
5264 return RTL_EXPR_RTL (TREE_VALUE (link));
5265
5266 abort ();
5267 }
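/* The context_display list walked above is built by
   expand_function_start below: each element pairs an enclosing
   FUNCTION_DECL (TREE_PURPOSE) with an RTL_EXPR whose RTL is a pseudo
   holding that function's frame base (TREE_VALUE).  Roughly, for a
   function nested two levels deep it looks like

	((outer_fn . rtl_expr[reg 104]) (outermost_fn . rtl_expr[reg 105]))

   with the register numbers here being purely illustrative; whatever
   copy_to_reg produced while chaining through the frames is what is
   actually stored.  */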
5268 \f
5269 /* Convert a stack slot address ADDR for variable VAR
5270 (from a containing function)
5271 into an address valid in this function (using a static chain). */
5272
5273 rtx
5274 fix_lexical_addr (addr, var)
5275 rtx addr;
5276 tree var;
5277 {
5278 rtx basereg;
5279 HOST_WIDE_INT displacement;
5280 tree context = decl_function_context (var);
5281 struct function *fp;
5282 rtx base = 0;
5283
5284 /* If this is the present function, we need not do anything. */
5285 if (context == current_function_decl || context == inline_function_decl)
5286 return addr;
5287
5288 for (fp = outer_function_chain; fp; fp = fp->next)
5289 if (fp->decl == context)
5290 break;
5291
5292 if (fp == 0)
5293 abort ();
5294
5295 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5296 addr = XEXP (XEXP (addr, 0), 0);
5297
5298 /* Decode given address as base reg plus displacement. */
5299 if (GET_CODE (addr) == REG)
5300 basereg = addr, displacement = 0;
5301 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5302 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5303 else
5304 abort ();
5305
5306 /* We accept vars reached via the containing function's
5307 incoming arg pointer and via its stack variables pointer. */
5308 if (basereg == fp->internal_arg_pointer)
5309 {
5310 /* If reached via arg pointer, get the arg pointer value
5311 out of that function's stack frame.
5312
5313 There are two cases: If a separate ap is needed, allocate a
5314 slot in the outer function for it and dereference it that way.
5315 This is correct even if the real ap is actually a pseudo.
5316 Otherwise, just adjust the offset from the frame pointer to
5317 compensate. */
5318
5319 #ifdef NEED_SEPARATE_AP
5320 rtx addr;
5321
5322 if (fp->x_arg_pointer_save_area == 0)
5323 fp->x_arg_pointer_save_area
5324 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5325
5326 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5327 addr = memory_address (Pmode, addr);
5328
5329 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5330 #else
5331 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5332 base = lookup_static_chain (var);
5333 #endif
5334 }
5335
5336 else if (basereg == virtual_stack_vars_rtx)
5337 {
5338 /* This is the same code as lookup_static_chain, duplicated here to
5339 avoid an extra call to decl_function_context. */
5340 tree link;
5341
5342 for (link = context_display; link; link = TREE_CHAIN (link))
5343 if (TREE_PURPOSE (link) == context)
5344 {
5345 base = RTL_EXPR_RTL (TREE_VALUE (link));
5346 break;
5347 }
5348 }
5349
5350 if (base == 0)
5351 abort ();
5352
5353 /* Use same offset, relative to appropriate static chain or argument
5354 pointer. */
5355 return plus_constant (base, displacement);
5356 }
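/* Example of the decomposition above: if ADDR is

	(plus (reg virtual-stack-vars) (const_int -24))

   for a local of the enclosing function, BASEREG is that function's
   stack-variables pointer and DISPLACEMENT is -24, so the value
   returned is roughly (plus (reg chain) (const_int -24)), where CHAIN
   is the pseudo found through the context display.  The offset -24 is
   only an illustration; the point is that the same displacement is
   reapplied to the outer frame's base at run time.  */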
5357 \f
5358 /* Return the address of the trampoline for entering nested fn FUNCTION.
5359 If necessary, allocate a trampoline (in the stack frame)
5360 and emit rtl to initialize its contents (at entry to this function). */
5361
5362 rtx
5363 trampoline_address (function)
5364 tree function;
5365 {
5366 tree link;
5367 tree rtlexp;
5368 rtx tramp;
5369 struct function *fp;
5370 tree fn_context;
5371
5372 /* Find an existing trampoline and return it. */
5373 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5374 if (TREE_PURPOSE (link) == function)
5375 return
5376 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5377
5378 for (fp = outer_function_chain; fp; fp = fp->next)
5379 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5380 if (TREE_PURPOSE (link) == function)
5381 {
5382 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5383 function);
5384 return round_trampoline_addr (tramp);
5385 }
5386
5387 /* None exists; we must make one. */
5388
5389 /* Find the `struct function' for the function containing FUNCTION. */
5390 fp = 0;
5391 fn_context = decl_function_context (function);
5392 if (fn_context != current_function_decl
5393 && fn_context != inline_function_decl)
5394 for (fp = outer_function_chain; fp; fp = fp->next)
5395 if (fp->decl == fn_context)
5396 break;
5397
5398 /* Allocate run-time space for this trampoline
5399 (usually in the defining function's stack frame). */
5400 #ifdef ALLOCATE_TRAMPOLINE
5401 tramp = ALLOCATE_TRAMPOLINE (fp);
5402 #else
5403 /* If rounding is needed, allocate extra space
5404 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5405 #ifdef TRAMPOLINE_ALIGNMENT
5406 #define TRAMPOLINE_REAL_SIZE \
5407 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5408 #else
5409 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5410 #endif
5411 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5412 fp ? fp : cfun);
5413 #endif
5414
5415 /* Record the trampoline for reuse and note it for later initialization
5416 by expand_function_end. */
5417 if (fp != 0)
5418 {
5419 push_obstacks (fp->function_maybepermanent_obstack,
5420 fp->function_maybepermanent_obstack);
5421 rtlexp = make_node (RTL_EXPR);
5422 RTL_EXPR_RTL (rtlexp) = tramp;
5423 fp->x_trampoline_list = tree_cons (function, rtlexp,
5424 fp->x_trampoline_list);
5425 pop_obstacks ();
5426 }
5427 else
5428 {
5429 /* Make the RTL_EXPR node temporary, not momentary, so that the
5430 trampoline_list doesn't become garbage. */
5431 int momentary = suspend_momentary ();
5432 rtlexp = make_node (RTL_EXPR);
5433 resume_momentary (momentary);
5434
5435 RTL_EXPR_RTL (rtlexp) = tramp;
5436 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5437 }
5438
5439 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5440 return round_trampoline_addr (tramp);
5441 }
5442
5443 /* Given a trampoline address,
5444 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5445
5446 static rtx
5447 round_trampoline_addr (tramp)
5448 rtx tramp;
5449 {
5450 #ifdef TRAMPOLINE_ALIGNMENT
5451 /* Round address up to desired boundary. */
5452 rtx temp = gen_reg_rtx (Pmode);
5453 temp = expand_binop (Pmode, add_optab, tramp,
5454 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5455 temp, 0, OPTAB_LIB_WIDEN);
5456 tramp = expand_binop (Pmode, and_optab, temp,
5457 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5458 temp, 0, OPTAB_LIB_WIDEN);
5459 #endif
5460 return tramp;
5461 }
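/* The pair of expand_binop calls above is the usual power-of-two
   round-up: add ALIGN-1, then mask off the low bits.  For example,
   with TRAMPOLINE_ALIGNMENT of 64 bits (8 bytes) and a trampoline
   placed at 0x1003,

	(0x1003 + 7) & -8  ==  0x1008

   which is the next 8-byte boundary.  The address is illustrative;
   the idiom works for any power-of-two alignment.  */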
5462 \f
5463 /* The functions identify_blocks and reorder_blocks provide a way to
5464 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5465 duplicate portions of the RTL code. Call identify_blocks before
5466 changing the RTL, and call reorder_blocks after. */
5467
5468 /* Collect all this function's BLOCK nodes, including those that are
5469 chained onto the first block, into a vector in depth-first order.
5470 Also store in each NOTE for the beginning or end of a block
5471 the BLOCK node it corresponds to (its NOTE_BLOCK).
5472 The arguments are BLOCK, the chain of top-level blocks of the function,
5473 and INSNS, the insn chain of the function. */
5474
5475 void
5476 identify_blocks (block, insns)
5477 tree block;
5478 rtx insns;
5479 {
5480 int n_blocks;
5481 tree *block_vector;
5482 tree *block_stack;
5483 int depth = 0;
5484 int current_block_number = 1;
5485 rtx insn;
5486
5487 if (block == 0)
5488 return;
5489
5490 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5491 depth-first order. */
5492 n_blocks = all_blocks (block, 0);
5493 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5494 all_blocks (block, block_vector);
5495
5496 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5497
5498 for (insn = insns; insn; insn = NEXT_INSN (insn))
5499 if (GET_CODE (insn) == NOTE)
5500 {
5501 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5502 {
5503 tree b;
5504
5505 /* If there are more block notes than BLOCKs, something
5506 is badly wrong. */
5507 if (current_block_number == n_blocks)
5508 abort ();
5509
5510 b = block_vector[current_block_number++];
5511 NOTE_BLOCK (insn) = b;
5512 block_stack[depth++] = b;
5513 }
5514 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5515 {
5516 if (depth == 0)
5517 /* There are more NOTE_INSN_BLOCK_ENDs than
5518 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5519 abort ();
5520
5521 NOTE_BLOCK (insn) = block_stack[--depth];
5522 }
5523 }
5524
5525 /* In whole-function mode, we might not have seen the whole function
5526 yet, so we might not use up all the blocks. */
5527 if (n_blocks != current_block_number
5528 && !cfun->x_whole_function_mode_p)
5529 abort ();
5530
5531 free (block_vector);
5532 free (block_stack);
5533 }
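/* As an illustration, for an insn stream containing

	NOTE_INSN_BLOCK_BEG		<- block_vector[1], pushed
	  NOTE_INSN_BLOCK_BEG		<- block_vector[2], pushed
	  NOTE_INSN_BLOCK_END		<- popped, gets block_vector[2]
	NOTE_INSN_BLOCK_END		<- popped, gets block_vector[1]

   each note ends up with NOTE_BLOCK pointing at the BLOCK occupying
   the same depth-first position in the original tree.  Entry 0 of the
   vector is the function's outermost block, which has no block notes
   of its own, hence the counter starting at 1.  */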
5534
5535 /* Given a revised instruction chain, rebuild the tree structure of
5536 BLOCK nodes to correspond to the new order of RTL. The new block
5537 tree is inserted below BLOCK. Returns the current top-level
5538 block. */
5539
5540 tree
5541 reorder_blocks (block, insns)
5542 tree block;
5543 rtx insns;
5544 {
5545 tree current_block = block;
5546 rtx insn;
5547
5548 if (block == NULL_TREE)
5549 return NULL_TREE;
5550
5551 /* Prune the old trees away, so that they don't get in the way. */
5552 BLOCK_SUBBLOCKS (current_block) = 0;
5553 BLOCK_CHAIN (current_block) = 0;
5554
5555 for (insn = insns; insn; insn = NEXT_INSN (insn))
5556 if (GET_CODE (insn) == NOTE)
5557 {
5558 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5559 {
5560 tree block = NOTE_BLOCK (insn);
5561 /* If we have seen this block before, copy it. */
5562 if (TREE_ASM_WRITTEN (block))
5563 block = copy_node (block);
5564 BLOCK_SUBBLOCKS (block) = 0;
5565 TREE_ASM_WRITTEN (block) = 1;
5566 BLOCK_SUPERCONTEXT (block) = current_block;
5567 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5568 BLOCK_SUBBLOCKS (current_block) = block;
5569 current_block = block;
5570 }
5571 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5572 {
5573 BLOCK_SUBBLOCKS (current_block)
5574 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5575 current_block = BLOCK_SUPERCONTEXT (current_block);
5576 }
5577 }
5578
5579 BLOCK_SUBBLOCKS (current_block)
5580 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5581 return current_block;
5582 }
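/* Rough picture of the rebuild: the BLOCK tree is reconstructed purely
   from the order of the block notes left in the insn stream, so if an
   optimizer duplicated a region, the duplicated notes (whose BLOCKs
   are already marked TREE_ASM_WRITTEN) get fresh copies of their BLOCK
   nodes, and those copies are spliced in at the duplicated position.  */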
5583
5584 /* Reverse the order of elements in the chain T of blocks,
5585 and return the new head of the chain (old last element). */
5586
5587 static tree
5588 blocks_nreverse (t)
5589 tree t;
5590 {
5591 register tree prev = 0, decl, next;
5592 for (decl = t; decl; decl = next)
5593 {
5594 next = BLOCK_CHAIN (decl);
5595 BLOCK_CHAIN (decl) = prev;
5596 prev = decl;
5597 }
5598 return prev;
5599 }
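/* Standard in-place reversal of a singly linked list; for a chain
   A -> B -> C the iterations produce, step by step,

	prev = 0,  decl = A   =>   A -> 0
	prev = A,  decl = B   =>   B -> A -> 0
	prev = B,  decl = C   =>   C -> B -> A -> 0

   and C is returned as the new head.  Same shape as nreverse for
   TREE_CHAIN lists, but walking BLOCK_CHAIN instead.  */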
5600
5601 /* Count the subblocks of the list starting with BLOCK, and list them
5602 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5603 blocks. */
5604
5605 static int
5606 all_blocks (block, vector)
5607 tree block;
5608 tree *vector;
5609 {
5610 int n_blocks = 0;
5611
5612 while (block)
5613 {
5614 TREE_ASM_WRITTEN (block) = 0;
5615
5616 /* Record this block. */
5617 if (vector)
5618 vector[n_blocks] = block;
5619
5620 ++n_blocks;
5621
5622 /* Record the subblocks, and their subblocks... */
5623 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5624 vector ? vector + n_blocks : 0);
5625 block = BLOCK_CHAIN (block);
5626 }
5627
5628 return n_blocks;
5629 }
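/* So for a block tree shaped like

	B0
	+- B1
	|  +- B2
	+- B3

   all_blocks fills VECTOR with { B0, B1, B2, B3 } and returns 4:
   each block precedes its subblocks, and siblings appear in
   BLOCK_CHAIN order, which matches the order in which identify_blocks
   hands out vector entries to the block notes.  The block names are
   illustrative only.  */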
5630 \f
5631 /* Allocate a function structure and reset its contents to the defaults. */
5632 static void
5633 prepare_function_start ()
5634 {
5635 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5636
5637 init_stmt_for_function ();
5638 init_eh_for_function ();
5639
5640 cse_not_expected = ! optimize;
5641
5642 /* Caller save not needed yet. */
5643 caller_save_needed = 0;
5644
5645 /* No stack slots have been made yet. */
5646 stack_slot_list = 0;
5647
5648 current_function_has_nonlocal_label = 0;
5649 current_function_has_nonlocal_goto = 0;
5650
5651 /* There is no stack slot for handling nonlocal gotos. */
5652 nonlocal_goto_handler_slots = 0;
5653 nonlocal_goto_stack_level = 0;
5654
5655 /* No labels have been declared for nonlocal use. */
5656 nonlocal_labels = 0;
5657 nonlocal_goto_handler_labels = 0;
5658
5659 /* No function calls so far in this function. */
5660 function_call_count = 0;
5661
5662 /* No parm regs have been allocated.
5663 (This is important for output_inline_function.) */
5664 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5665
5666 /* Initialize the RTL mechanism. */
5667 init_emit ();
5668
5669 /* Initialize the queue of pending postincrements and postdecrements,
5670 and some other info in expr.c. */
5671 init_expr ();
5672
5673 /* We haven't done register allocation yet. */
5674 reg_renumber = 0;
5675
5676 init_varasm_status (cfun);
5677
5678 /* Clear out data used for inlining. */
5679 cfun->inlinable = 0;
5680 cfun->original_decl_initial = 0;
5681 cfun->original_arg_vector = 0;
5682
5683 cfun->stack_alignment_needed = 0;
5684 #ifdef STACK_BOUNDARY
5685 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5686 #endif
5687
5688 /* Set if a call to setjmp is seen. */
5689 current_function_calls_setjmp = 0;
5690
5691 /* Set if a call to longjmp is seen. */
5692 current_function_calls_longjmp = 0;
5693
5694 current_function_calls_alloca = 0;
5695 current_function_contains_functions = 0;
5696 current_function_is_leaf = 0;
5697 current_function_sp_is_unchanging = 0;
5698 current_function_uses_only_leaf_regs = 0;
5699 current_function_has_computed_jump = 0;
5700 current_function_is_thunk = 0;
5701
5702 current_function_returns_pcc_struct = 0;
5703 current_function_returns_struct = 0;
5704 current_function_epilogue_delay_list = 0;
5705 current_function_uses_const_pool = 0;
5706 current_function_uses_pic_offset_table = 0;
5707 current_function_cannot_inline = 0;
5708
5709 /* We have not yet needed to make a label to jump to for tail-recursion. */
5710 tail_recursion_label = 0;
5711
5712 /* We haven't had a need to make a save area for ap yet. */
5713 arg_pointer_save_area = 0;
5714
5715 /* No stack slots allocated yet. */
5716 frame_offset = 0;
5717
5718 /* No SAVE_EXPRs in this function yet. */
5719 save_expr_regs = 0;
5720
5721 /* No RTL_EXPRs in this function yet. */
5722 rtl_expr_chain = 0;
5723
5724 /* Set up to allocate temporaries. */
5725 init_temp_slots ();
5726
5727 /* Indicate that we need to distinguish between the return value of the
5728 present function and the return value of a function being called. */
5729 rtx_equal_function_value_matters = 1;
5730
5731 /* Indicate that we have not instantiated virtual registers yet. */
5732 virtuals_instantiated = 0;
5733
5734 /* Indicate we have no need of a frame pointer yet. */
5735 frame_pointer_needed = 0;
5736
5737 /* By default assume not varargs or stdarg. */
5738 current_function_varargs = 0;
5739 current_function_stdarg = 0;
5740
5741 /* We haven't made any trampolines for this function yet. */
5742 trampoline_list = 0;
5743
5744 init_pending_stack_adjust ();
5745 inhibit_defer_pop = 0;
5746
5747 current_function_outgoing_args_size = 0;
5748
5749 if (init_lang_status)
5750 (*init_lang_status) (cfun);
5751 if (init_machine_status)
5752 (*init_machine_status) (cfun);
5753 }
5754
5755 /* Initialize the rtl expansion mechanism so that we can do simple things
5756 like generate sequences. This is used to provide a context during global
5757 initialization of some passes. */
5758 void
5759 init_dummy_function_start ()
5760 {
5761 prepare_function_start ();
5762 }
5763
5764 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5765 and initialize static variables for generating RTL for the statements
5766 of the function. */
5767
5768 void
5769 init_function_start (subr, filename, line)
5770 tree subr;
5771 char *filename;
5772 int line;
5773 {
5774 prepare_function_start ();
5775
5776 /* Remember this function for later. */
5777 cfun->next_global = all_functions;
5778 all_functions = cfun;
5779
5780 current_function_name = (*decl_printable_name) (subr, 2);
5781 cfun->decl = subr;
5782
5783 /* Nonzero if this is a nested function that uses a static chain. */
5784
5785 current_function_needs_context
5786 = (decl_function_context (current_function_decl) != 0
5787 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5788
5789 /* Within function body, compute a type's size as soon as it is laid out. */
5790 immediate_size_expand++;
5791
5792 /* Prevent ever trying to delete the first instruction of a function.
5793 Also tell final how to output a linenum before the function prologue.
5794 Note linenums could be missing, e.g. when compiling a Java .class file. */
5795 if (line > 0)
5796 emit_line_note (filename, line);
5797
5798 /* Make sure first insn is a note even if we don't want linenums.
5799 This makes sure the first insn will never be deleted.
5800 Also, final expects a note to appear there. */
5801 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5802
5803 /* Set flags used by final.c. */
5804 if (aggregate_value_p (DECL_RESULT (subr)))
5805 {
5806 #ifdef PCC_STATIC_STRUCT_RETURN
5807 current_function_returns_pcc_struct = 1;
5808 #endif
5809 current_function_returns_struct = 1;
5810 }
5811
5812 /* Warn if this value is an aggregate type,
5813 regardless of which calling convention we are using for it. */
5814 if (warn_aggregate_return
5815 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5816 warning ("function returns an aggregate");
5817
5818 current_function_returns_pointer
5819 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5820 }
5821
5822 /* Make sure all values used by the optimization passes have sane
5823 defaults. */
5824 void
5825 init_function_for_compilation ()
5826 {
5827 reg_renumber = 0;
5828 /* No prologue/epilogue insns yet. */
5829 prologue = epilogue = 0;
5830 }
5831
5832 /* Indicate that the current function uses extra args
5833 not explicitly mentioned in the argument list in any fashion. */
5834
5835 void
5836 mark_varargs ()
5837 {
5838 current_function_varargs = 1;
5839 }
5840
5841 /* Expand a call to __main at the beginning of a possible main function. */
5842
5843 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5844 #undef HAS_INIT_SECTION
5845 #define HAS_INIT_SECTION
5846 #endif
5847
5848 void
5849 expand_main_function ()
5850 {
5851 #if !defined (HAS_INIT_SECTION)
5852 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5853 VOIDmode, 0);
5854 #endif /* not HAS_INIT_SECTION */
5855 }
5856 \f
5857 extern struct obstack permanent_obstack;
5858
5859 /* Start the RTL for a new function, and set variables used for
5860 emitting RTL.
5861 SUBR is the FUNCTION_DECL node.
5862 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5863 the function's parameters, which must be run at any return statement. */
5864
5865 void
5866 expand_function_start (subr, parms_have_cleanups)
5867 tree subr;
5868 int parms_have_cleanups;
5869 {
5870 tree tem;
5871 rtx last_ptr = NULL_RTX;
5872
5873 /* Make sure volatile mem refs aren't considered
5874 valid operands of arithmetic insns. */
5875 init_recog_no_volatile ();
5876
5877 /* Set this before generating any memory accesses. */
5878 current_function_check_memory_usage
5879 = (flag_check_memory_usage
5880 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5881
5882 current_function_instrument_entry_exit
5883 = (flag_instrument_function_entry_exit
5884 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5885
5886 current_function_limit_stack
5887 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5888
5889 /* If function gets a static chain arg, store it in the stack frame.
5890 Do this first, so it gets the first stack slot offset. */
5891 if (current_function_needs_context)
5892 {
5893 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5894
5895 /* Delay copying static chain if it is not a register to avoid
5896 conflicts with regs used for parameters. */
5897 if (! SMALL_REGISTER_CLASSES
5898 || GET_CODE (static_chain_incoming_rtx) == REG)
5899 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5900 }
5901
5902 /* If the parameters of this function need cleaning up, get a label
5903 for the beginning of the code which executes those cleanups. This must
5904 be done before doing anything with return_label. */
5905 if (parms_have_cleanups)
5906 cleanup_label = gen_label_rtx ();
5907 else
5908 cleanup_label = 0;
5909
5910 /* Make the label for return statements to jump to, if this machine
5911 does not have a one-instruction return and uses an epilogue,
5912 or if it returns a structure, or if it has parm cleanups. */
5913 #ifdef HAVE_return
5914 if (cleanup_label == 0 && HAVE_return
5915 && ! current_function_instrument_entry_exit
5916 && ! current_function_returns_pcc_struct
5917 && ! (current_function_returns_struct && ! optimize))
5918 return_label = 0;
5919 else
5920 return_label = gen_label_rtx ();
5921 #else
5922 return_label = gen_label_rtx ();
5923 #endif
5924
5925 /* Initialize rtx used to return the value. */
5926 /* Do this before assign_parms so that we copy the struct value address
5927 before any library calls that assign parms might generate. */
5928
5929 /* Decide whether to return the value in memory or in a register. */
5930 if (aggregate_value_p (DECL_RESULT (subr)))
5931 {
5932 /* Returning something that won't go in a register. */
5933 register rtx value_address = 0;
5934
5935 #ifdef PCC_STATIC_STRUCT_RETURN
5936 if (current_function_returns_pcc_struct)
5937 {
5938 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5939 value_address = assemble_static_space (size);
5940 }
5941 else
5942 #endif
5943 {
5944 /* Expect to be passed the address of a place to store the value.
5945 If it is passed as an argument, assign_parms will take care of
5946 it. */
5947 if (struct_value_incoming_rtx)
5948 {
5949 value_address = gen_reg_rtx (Pmode);
5950 emit_move_insn (value_address, struct_value_incoming_rtx);
5951 }
5952 }
5953 if (value_address)
5954 {
5955 DECL_RTL (DECL_RESULT (subr))
5956 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5957 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5958 AGGREGATE_TYPE_P (TREE_TYPE
5959 (DECL_RESULT
5960 (subr))));
5961 }
5962 }
5963 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5964 /* If return mode is void, this decl rtl should not be used. */
5965 DECL_RTL (DECL_RESULT (subr)) = 0;
5966 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5967 {
5968 /* If function will end with cleanup code for parms,
5969 compute the return value into a pseudo reg,
5970 which we will copy into the true return register
5971 after the cleanups are done. */
5972
5973 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5974
5975 #ifdef PROMOTE_FUNCTION_RETURN
5976 tree type = TREE_TYPE (DECL_RESULT (subr));
5977 int unsignedp = TREE_UNSIGNED (type);
5978
5979 mode = promote_mode (type, mode, &unsignedp, 1);
5980 #endif
5981
5982 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5983 }
5984 else
5985 /* Scalar, returned in a register. */
5986 {
5987 #ifdef FUNCTION_OUTGOING_VALUE
5988 DECL_RTL (DECL_RESULT (subr))
5989 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5990 #else
5991 DECL_RTL (DECL_RESULT (subr))
5992 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5993 #endif
5994
5995 /* Mark this reg as the function's return value. */
5996 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5997 {
5998 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5999 /* Needed because we may need to move this to memory
6000 in case it's a named return value whose address is taken. */
6001 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6002 }
6003 }
6004
6005 /* Initialize rtx for parameters and local variables.
6006 In some cases this requires emitting insns. */
6007
6008 assign_parms (subr);
6009
6010 /* Copy the static chain now if it wasn't a register. The delay is to
6011 avoid conflicts with the parameter passing registers. */
6012
6013 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6014 if (GET_CODE (static_chain_incoming_rtx) != REG)
6015 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6016
6017 /* The following was moved from init_function_start.
6018 The move is supposed to make sdb output more accurate. */
6019 /* Indicate the beginning of the function body,
6020 as opposed to parm setup. */
6021 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6022
6023 if (GET_CODE (get_last_insn ()) != NOTE)
6024 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6025 parm_birth_insn = get_last_insn ();
6026
6027 context_display = 0;
6028 if (current_function_needs_context)
6029 {
6030 /* Fetch static chain values for containing functions. */
6031 tem = decl_function_context (current_function_decl);
6032 /* Copy the static chain pointer into a pseudo. If we have
6033 small register classes, copy the value from memory if
6034 static_chain_incoming_rtx is a REG. */
6035 if (tem)
6036 {
6037 /* If the static chain originally came in a register, put it back
6038 there, then move it out in the next insn. The reason for
6039 this peculiar code is to satisfy function integration. */
6040 if (SMALL_REGISTER_CLASSES
6041 && GET_CODE (static_chain_incoming_rtx) == REG)
6042 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6043 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6044 }
6045
6046 while (tem)
6047 {
6048 tree rtlexp = make_node (RTL_EXPR);
6049
6050 RTL_EXPR_RTL (rtlexp) = last_ptr;
6051 context_display = tree_cons (tem, rtlexp, context_display);
6052 tem = decl_function_context (tem);
6053 if (tem == 0)
6054 break;
6055 /* Chain thru stack frames, assuming pointer to next lexical frame
6056 is found at the place we always store it. */
6057 #ifdef FRAME_GROWS_DOWNWARD
6058 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6059 #endif
6060 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6061 memory_address (Pmode,
6062 last_ptr)));
6063
6064 /* If we are not optimizing, ensure that we know that this
6065 piece of context is live over the entire function. */
6066 if (! optimize)
6067 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6068 save_expr_regs);
6069 }
6070 }
6071
6072 if (current_function_instrument_entry_exit)
6073 {
6074 rtx fun = DECL_RTL (current_function_decl);
6075 if (GET_CODE (fun) == MEM)
6076 fun = XEXP (fun, 0);
6077 else
6078 abort ();
6079 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6080 fun, Pmode,
6081 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6082 0,
6083 hard_frame_pointer_rtx),
6084 Pmode);
6085 }
6086
6087 /* After the display initializations is where the tail-recursion label
6088 should go, if we end up needing one. Ensure we have a NOTE here
6089 since some things (like trampolines) get placed before this. */
6090 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6091
6092 /* Evaluate now the sizes of any types declared among the arguments. */
6093 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6094 {
6095 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6096 EXPAND_MEMORY_USE_BAD);
6097 /* Flush the queue in case this parameter declaration has
6098 side-effects. */
6099 emit_queue ();
6100 }
6101
6102 /* Make sure there is a line number after the function entry setup code. */
6103 force_next_line_note ();
6104 }
6105 \f
6106 /* Undo the effects of init_dummy_function_start. */
6107 void
6108 expand_dummy_function_end ()
6109 {
6110 /* End any sequences that failed to be closed due to syntax errors. */
6111 while (in_sequence_p ())
6112 end_sequence ();
6113
6114 /* Outside function body, can't compute type's actual size
6115 until next function's body starts. */
6116
6117 free_after_parsing (cfun);
6118 free_after_compilation (cfun);
6119 free (cfun);
6120 cfun = 0;
6121 }
6122
6123 /* Call DOIT for each hard register used as a return value from
6124 the current function. */
6125
6126 void
6127 diddle_return_value (doit, arg)
6128 void (*doit) PARAMS ((rtx, void *));
6129 void *arg;
6130 {
6131 rtx outgoing = current_function_return_rtx;
6132
6133 if (! outgoing)
6134 return;
6135
6136 if (GET_CODE (outgoing) == REG
6137 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6138 {
6139 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6140 #ifdef FUNCTION_OUTGOING_VALUE
6141 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6142 #else
6143 outgoing = FUNCTION_VALUE (type, current_function_decl);
6144 #endif
6145 /* If this is a BLKmode structure being returned in registers, then use
6146 the mode computed in expand_return. */
6147 if (GET_MODE (outgoing) == BLKmode)
6148 PUT_MODE (outgoing,
6149 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6150 }
6151
6152 if (GET_CODE (outgoing) == REG)
6153 (*doit) (outgoing, arg);
6154 else if (GET_CODE (outgoing) == PARALLEL)
6155 {
6156 int i;
6157
6158 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6159 {
6160 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6161
6162 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6163 (*doit) (x, arg);
6164 }
6165 }
6166 }
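/* DOIT is a plain callback; the two users below wrap it so that, for
   a function whose value comes back in (reg:SI 0) for example, the
   emitted RTL is

	(clobber (reg:SI 0))	from clobber_return_register, or
	(use (reg:SI 0))	from use_return_register

   and for a PARALLEL return value (a structure split across several
   registers) the callback is applied to each hard-register piece in
   turn.  The particular register above is only an example.  */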
6167
6168 static void
6169 do_clobber_return_reg (reg, arg)
6170 rtx reg;
6171 void *arg ATTRIBUTE_UNUSED;
6172 {
6173 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6174 }
6175
6176 void
6177 clobber_return_register ()
6178 {
6179 diddle_return_value (do_clobber_return_reg, NULL);
6180 }
6181
6182 static void
6183 do_use_return_reg (reg, arg)
6184 rtx reg;
6185 void *arg ATTRIBUTE_UNUSED;
6186 {
6187 emit_insn (gen_rtx_USE (VOIDmode, reg));
6188 }
6189
6190 void
6191 use_return_register ()
6192 {
6193 diddle_return_value (do_use_return_reg, NULL);
6194 }
6195
6196 /* Generate RTL for the end of the current function.
6197 FILENAME and LINE are the current position in the source file.
6198
6199 It is up to language-specific callers to do cleanups for parameters--
6200 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6201
6202 void
6203 expand_function_end (filename, line, end_bindings)
6204 char *filename;
6205 int line;
6206 int end_bindings;
6207 {
6208 tree link;
6209
6210 #ifdef TRAMPOLINE_TEMPLATE
6211 static rtx initial_trampoline;
6212 #endif
6213
6214 finish_expr_for_function ();
6215
6216 #ifdef NON_SAVING_SETJMP
6217 /* Don't put any variables in registers if we call setjmp
6218 on a machine that fails to restore the registers. */
6219 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6220 {
6221 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6222 setjmp_protect (DECL_INITIAL (current_function_decl));
6223
6224 setjmp_protect_args ();
6225 }
6226 #endif
6227
6228 /* Save the argument pointer if a save area was made for it. */
6229 if (arg_pointer_save_area)
6230 {
6231 /* arg_pointer_save_area may not be a valid memory address, so we
6232 have to check it and fix it if necessary. */
6233 rtx seq;
6234 start_sequence ();
6235 emit_move_insn (validize_mem (arg_pointer_save_area),
6236 virtual_incoming_args_rtx);
6237 seq = gen_sequence ();
6238 end_sequence ();
6239 emit_insn_before (seq, tail_recursion_reentry);
6240 }
6241
6242 /* Initialize any trampolines required by this function. */
6243 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6244 {
6245 tree function = TREE_PURPOSE (link);
6246 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6247 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6248 #ifdef TRAMPOLINE_TEMPLATE
6249 rtx blktramp;
6250 #endif
6251 rtx seq;
6252
6253 #ifdef TRAMPOLINE_TEMPLATE
6254 /* First make sure this compilation has a template for
6255 initializing trampolines. */
6256 if (initial_trampoline == 0)
6257 {
6258 end_temporary_allocation ();
6259 initial_trampoline
6260 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6261 resume_temporary_allocation ();
6262
6263 ggc_add_rtx_root (&initial_trampoline, 1);
6264 }
6265 #endif
6266
6267 /* Generate insns to initialize the trampoline. */
6268 start_sequence ();
6269 tramp = round_trampoline_addr (XEXP (tramp, 0));
6270 #ifdef TRAMPOLINE_TEMPLATE
6271 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6272 emit_block_move (blktramp, initial_trampoline,
6273 GEN_INT (TRAMPOLINE_SIZE),
6274 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6275 #endif
6276 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6277 seq = get_insns ();
6278 end_sequence ();
6279
6280 /* Put those insns at entry to the containing function (this one). */
6281 emit_insns_before (seq, tail_recursion_reentry);
6282 }
6283
6284 /* If we are doing stack checking and this function makes calls,
6285 do a stack probe at the start of the function to ensure we have enough
6286 space for another stack frame. */
6287 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6288 {
6289 rtx insn, seq;
6290
6291 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6292 if (GET_CODE (insn) == CALL_INSN)
6293 {
6294 start_sequence ();
6295 probe_stack_range (STACK_CHECK_PROTECT,
6296 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6297 seq = get_insns ();
6298 end_sequence ();
6299 emit_insns_before (seq, tail_recursion_reentry);
6300 break;
6301 }
6302 }
6303
6304 /* Warn about unused parms if extra warnings were specified. */
6305 if (warn_unused && extra_warnings)
6306 {
6307 tree decl;
6308
6309 for (decl = DECL_ARGUMENTS (current_function_decl);
6310 decl; decl = TREE_CHAIN (decl))
6311 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6312 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6313 warning_with_decl (decl, "unused parameter `%s'");
6314 }
6315
6316 /* Delete handlers for nonlocal gotos if nothing uses them. */
6317 if (nonlocal_goto_handler_slots != 0
6318 && ! current_function_has_nonlocal_label)
6319 delete_handlers ();
6320
6321 /* End any sequences that failed to be closed due to syntax errors. */
6322 while (in_sequence_p ())
6323 end_sequence ();
6324
6325 /* Outside function body, can't compute type's actual size
6326 until next function's body starts. */
6327 immediate_size_expand--;
6328
6329 clear_pending_stack_adjust ();
6330 do_pending_stack_adjust ();
6331
6332 /* Mark the end of the function body.
6333 If control reaches this insn, the function can drop through
6334 without returning a value. */
6335 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6336
6337 /* Must mark the last line number note in the function, so that the test
6338 coverage code can avoid counting the last line twice. This just tells
6339 the code to ignore the immediately following line note, since there
6340 already exists a copy of this note somewhere above. This line number
6341 note is still needed for debugging though, so we can't delete it. */
6342 if (flag_test_coverage)
6343 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6344
6345 /* Output a linenumber for the end of the function.
6346 SDB depends on this. */
6347 emit_line_note_force (filename, line);
6348
6349 /* Output the label for the actual return from the function,
6350 if one is expected. This happens either because a function epilogue
6351 is used instead of a return instruction, or because a return was done
6352 with a goto in order to run local cleanups, or because of pcc-style
6353 structure returning. */
6354
6355 if (return_label)
6356 {
6357 /* Before the return label, clobber the return registers so that
6358 they are not propagated live to the rest of the function. This
6359 can only happen with functions that drop through; if there had
6360 been a return statement, there would have either been a return
6361 rtx, or a jump to the return label. */
6362 clobber_return_register ();
6363
6364 emit_label (return_label);
6365 }
6366
6367 /* C++ uses this. */
6368 if (end_bindings)
6369 expand_end_bindings (0, 0, 0);
6370
6371 /* Now handle any leftover exception regions that may have been
6372 created for the parameters. */
6373 {
6374 rtx last = get_last_insn ();
6375 rtx label;
6376
6377 expand_leftover_cleanups ();
6378
6379 /* If there are any catch_clauses remaining, output them now. */
6380 emit_insns (catch_clauses);
6381 catch_clauses = NULL_RTX;
6382 /* If the above emitted any code, make sure we jump around it. */
6383 if (last != get_last_insn ())
6384 {
6385 label = gen_label_rtx ();
6386 last = emit_jump_insn_after (gen_jump (label), last);
6387 last = emit_barrier_after (last);
6388 emit_label (label);
6389 }
6390 }
6391
6392 if (current_function_instrument_entry_exit)
6393 {
6394 rtx fun = DECL_RTL (current_function_decl);
6395 if (GET_CODE (fun) == MEM)
6396 fun = XEXP (fun, 0);
6397 else
6398 abort ();
6399 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6400 fun, Pmode,
6401 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6402 0,
6403 hard_frame_pointer_rtx),
6404 Pmode);
6405 }
6406
6407 /* If we had calls to alloca, and this machine needs
6408 an accurate stack pointer to exit the function,
6409 insert some code to save and restore the stack pointer. */
6410 #ifdef EXIT_IGNORE_STACK
6411 if (! EXIT_IGNORE_STACK)
6412 #endif
6413 if (current_function_calls_alloca)
6414 {
6415 rtx tem = 0;
6416
6417 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6418 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6419 }
6420
6421 /* If scalar return value was computed in a pseudo-reg,
6422 copy that to the hard return register. */
6423 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6424 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6425 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6426 >= FIRST_PSEUDO_REGISTER))
6427 {
6428 rtx real_decl_result;
6429
6430 #ifdef FUNCTION_OUTGOING_VALUE
6431 real_decl_result
6432 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6433 current_function_decl);
6434 #else
6435 real_decl_result
6436 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6437 current_function_decl);
6438 #endif
6439 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6440 /* If this is a BLKmode structure being returned in registers, then use
6441 the mode computed in expand_return. */
6442 if (GET_MODE (real_decl_result) == BLKmode)
6443 PUT_MODE (real_decl_result,
6444 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6445 emit_move_insn (real_decl_result,
6446 DECL_RTL (DECL_RESULT (current_function_decl)));
6447
6448 /* The delay slot scheduler assumes that current_function_return_rtx
6449 holds the hard register containing the return value, not a temporary
6450 pseudo. */
6451 current_function_return_rtx = real_decl_result;
6452 }
6453
6454 /* If returning a structure, arrange to return the address of the value
6455 in a place where debuggers expect to find it.
6456
6457 If returning a structure PCC style,
6458 the caller also depends on this value.
6459 And current_function_returns_pcc_struct is not necessarily set. */
6460 if (current_function_returns_struct
6461 || current_function_returns_pcc_struct)
6462 {
6463 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6464 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6465 #ifdef FUNCTION_OUTGOING_VALUE
6466 rtx outgoing
6467 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6468 current_function_decl);
6469 #else
6470 rtx outgoing
6471 = FUNCTION_VALUE (build_pointer_type (type),
6472 current_function_decl);
6473 #endif
6474
6475 /* Mark this as a function return value so integrate will delete the
6476 assignment and USE below when inlining this function. */
6477 REG_FUNCTION_VALUE_P (outgoing) = 1;
6478
6479 emit_move_insn (outgoing, value_address);
6480 }
6481
6482 /* ??? This should no longer be necessary since stupid is no longer with
6483 us, but there are some parts of the compiler (eg reload_combine, and
6484 sh mach_dep_reorg) that still try and compute their own lifetime info
6485 instead of using the general framework. */
6486 use_return_register ();
6487
6488 /* If this is an implementation of __throw, do what's necessary to
6489 communicate between __builtin_eh_return and the epilogue. */
6490 expand_eh_return ();
6491
6492 /* Output a return insn if we are using one.
6493 Otherwise, let the rtl chain end here, to drop through
6494 into the epilogue. */
6495
6496 #ifdef HAVE_return
6497 if (HAVE_return)
6498 {
6499 emit_jump_insn (gen_return ());
6500 emit_barrier ();
6501 }
6502 #endif
6503
6504 /* Fix up any gotos that jumped out to the outermost
6505 binding level of the function.
6506 Must follow emitting RETURN_LABEL. */
6507
6508 /* If you have any cleanups to do at this point,
6509 and they need to create temporary variables,
6510 then you will lose. */
6511 expand_fixups (get_insns ());
6512 }
6513 \f
6514 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6515 or a single insn). */
6516
6517 static int *
6518 record_insns (insns)
6519 rtx insns;
6520 {
6521 int *vec;
6522
6523 if (GET_CODE (insns) == SEQUENCE)
6524 {
6525 int len = XVECLEN (insns, 0);
6526 vec = (int *) oballoc ((len + 1) * sizeof (int));
6527 vec[len] = 0;
6528 while (--len >= 0)
6529 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6530 }
6531 else
6532 {
6533 vec = (int *) oballoc (2 * sizeof (int));
6534 vec[0] = INSN_UID (insns);
6535 vec[1] = 0;
6536 }
6537 return vec;
6538 }
6539
6540 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6541
6542 static int
6543 contains (insn, vec)
6544 rtx insn;
6545 int *vec;
6546 {
6547 register int i, j;
6548
6549 if (GET_CODE (insn) == INSN
6550 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6551 {
6552 int count = 0;
6553 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6554 for (j = 0; vec[j]; j++)
6555 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6556 count++;
6557 return count;
6558 }
6559 else
6560 {
6561 for (j = 0; vec[j]; j++)
6562 if (INSN_UID (insn) == vec[j])
6563 return 1;
6564 }
6565 return 0;
6566 }
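/* Example: if the prologue was emitted as a three-insn sequence whose
   insns got UIDs 11, 12 and 13, record_insns stores the
   zero-terminated vector { 11, 12, 13, 0 }, and contains later reports
   how many of those UIDs occur in a given insn (looking inside any
   SEQUENCE produced by delay-slot filling).  That is how
   prologue_epilogue_contains below recognizes prologue and epilogue
   insns after scheduling has moved them.  The UIDs are illustrative.  */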
6567
6568 int
6569 prologue_epilogue_contains (insn)
6570 rtx insn;
6571 {
6572 if (prologue && contains (insn, prologue))
6573 return 1;
6574 if (epilogue && contains (insn, epilogue))
6575 return 1;
6576 return 0;
6577 }
6578
6579 #ifdef HAVE_return
6580 /* Insert gen_return at the end of block BB. This also means updating
6581 block_for_insn appropriately. */
6582
6583 static void
6584 emit_return_into_block (bb)
6585 basic_block bb;
6586 {
6587 rtx p, end;
6588
6589 end = emit_jump_insn_after (gen_return (), bb->end);
6590 p = NEXT_INSN (bb->end);
6591 while (1)
6592 {
6593 set_block_for_insn (p, bb);
6594 if (p == end)
6595 break;
6596 p = NEXT_INSN (p);
6597 }
6598 bb->end = end;
6599 }
6600 #endif /* HAVE_return */
6601
6602 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6603 this into place with notes indicating where the prologue ends and where
6604 the epilogue begins. Update the basic block information when possible. */
6605
6606 void
6607 thread_prologue_and_epilogue_insns (f)
6608 rtx f ATTRIBUTE_UNUSED;
6609 {
6610 int inserted = 0;
6611 edge e;
6612 rtx seq;
6613
6614 #ifdef HAVE_prologue
6615 if (HAVE_prologue)
6616 {
6617 rtx insn;
6618
6619 start_sequence ();
6620 seq = gen_prologue();
6621 emit_insn (seq);
6622
6623 /* Retain a map of the prologue insns. */
6624 if (GET_CODE (seq) != SEQUENCE)
6625 seq = get_insns ();
6626 prologue = record_insns (seq);
6627 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6628
6629 /* GDB handles `break f' by setting a breakpoint on the first
6630 line note *after* the prologue. That means that we should
6631 insert a line note here; otherwise, if the next line note
6632 comes part way into the next block, GDB will skip all the way
6633 to that point. */
6634 insn = next_nonnote_insn (f);
6635 while (insn)
6636 {
6637 if (GET_CODE (insn) == NOTE
6638 && NOTE_LINE_NUMBER (insn) >= 0)
6639 {
6640 emit_line_note_force (NOTE_SOURCE_FILE (insn),
6641 NOTE_LINE_NUMBER (insn));
6642 break;
6643 }
6644
6645 insn = PREV_INSN (insn);
6646 }
6647
6648 seq = gen_sequence ();
6649 end_sequence ();
6650
6651 /* If optimization is off, and perhaps in an empty function,
6652 the entry block will have no successors. */
6653 if (ENTRY_BLOCK_PTR->succ)
6654 {
6655 /* Can't deal with multiple successors of the entry block. */
6656 if (ENTRY_BLOCK_PTR->succ->succ_next)
6657 abort ();
6658
6659 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6660 inserted = 1;
6661 }
6662 else
6663 emit_insn_after (seq, f);
6664 }
6665 #endif
6666
6667 /* If the exit block has no non-fake predecessors, we don't need
6668 an epilogue. */
6669 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6670 if ((e->flags & EDGE_FAKE) == 0)
6671 break;
6672 if (e == NULL)
6673 goto epilogue_done;
6674
6675 #ifdef HAVE_return
6676 if (optimize && HAVE_return)
6677 {
6678 /* If we're allowed to generate a simple return instruction,
6679 then by definition we don't need a full epilogue. Examine
6680 the block that falls through to EXIT. If it does not
6681 contain any code, examine its predecessors and try to
6682 emit (conditional) return instructions. */
6683
6684 basic_block last;
6685 edge e_next;
6686 rtx label;
6687
6688 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6689 if (e->flags & EDGE_FALLTHRU)
6690 break;
6691 if (e == NULL)
6692 goto epilogue_done;
6693 last = e->src;
6694
6695 /* Verify that there are no active instructions in the last block. */
6696 label = last->end;
6697 while (label && GET_CODE (label) != CODE_LABEL)
6698 {
6699 if (active_insn_p (label))
6700 break;
6701 label = PREV_INSN (label);
6702 }
6703
6704 if (last->head == label && GET_CODE (label) == CODE_LABEL)
6705 {
6706 for (e = last->pred; e ; e = e_next)
6707 {
6708 basic_block bb = e->src;
6709 rtx jump;
6710
6711 e_next = e->pred_next;
6712 if (bb == ENTRY_BLOCK_PTR)
6713 continue;
6714
6715 jump = bb->end;
6716 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
6717 continue;
6718
6719 /* If we have an unconditional jump, we can replace that
6720 with a simple return instruction. */
6721 if (simplejump_p (jump))
6722 {
6723 emit_return_into_block (bb);
6724 flow_delete_insn (jump);
6725 }
6726
6727 /* If we have a conditional jump, we can try to replace
6728 that with a conditional return instruction. */
6729 else if (condjump_p (jump))
6730 {
6731 rtx ret, *loc;
6732
6733 ret = SET_SRC (PATTERN (jump));
6734 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
6735 loc = &XEXP (ret, 1);
6736 else
6737 loc = &XEXP (ret, 2);
6738 ret = gen_rtx_RETURN (VOIDmode);
6739
6740 if (! validate_change (jump, loc, ret, 0))
6741 continue;
6742 if (JUMP_LABEL (jump))
6743 LABEL_NUSES (JUMP_LABEL (jump))--;
6744
6745 /* If this block has only one successor, it both jumps
6746 and falls through to the fallthru block, so we can't
6747 delete the edge. */
6748 if (bb->succ->succ_next == NULL)
6749 continue;
6750 }
6751 else
6752 continue;
6753
6754 /* Fix up the CFG for the successful change we just made. */
6755 remove_edge (e);
6756 make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
6757 }
6758
6759 /* Emit a return insn for the exit fallthru block. Whether
6760 this is still reachable will be determined later. */
6761
6762 emit_barrier_after (last->end);
6763 emit_return_into_block (last);
6764 }
6765 else
6766 {
6767 /* The exit block wasn't empty. We have to use insert_insn_on_edge,
6768 as the block may be able to go elsewhere as well
6769 as exiting. */
6770 start_sequence ();
6771 emit_jump_insn (gen_return ());
6772 seq = gen_sequence ();
6773 end_sequence ();
6774 insert_insn_on_edge (seq, e);
6775 inserted = 1;
6776 }
6777 goto epilogue_done;
6778 }
6779 #endif
6780 #ifdef HAVE_epilogue
6781 if (HAVE_epilogue)
6782 {
6783 /* Find the edge that falls through to EXIT. Other edges may exist
6784 due to RETURN instructions, but those don't need epilogues.
6785 There really shouldn't be a mixture -- either all should have
6786 been converted or none, however... */
6787
6788 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6789 if (e->flags & EDGE_FALLTHRU)
6790 break;
6791 if (e == NULL)
6792 goto epilogue_done;
6793
6794 start_sequence ();
6795 emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
6796
6797 seq = gen_epilogue ();
6798 emit_jump_insn (seq);
6799
6800 /* Retain a map of the epilogue insns. */
6801 if (GET_CODE (seq) != SEQUENCE)
6802 seq = get_insns ();
6803 epilogue = record_insns (seq);
6804
6805 seq = gen_sequence ();
6806 end_sequence();
6807
6808 insert_insn_on_edge (seq, e);
6809 inserted = 1;
6810 }
6811 #endif
6812 epilogue_done:
6813
6814 if (inserted)
6815 commit_edge_insertions ();
6816 }
6817
6818 /* Reposition the prologue-end and epilogue-begin notes after instruction
6819 scheduling and delayed branch scheduling. */
6820
6821 void
6822 reposition_prologue_and_epilogue_notes (f)
6823 rtx f ATTRIBUTE_UNUSED;
6824 {
6825 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6826 /* Reposition the prologue and epilogue notes. */
6827 if (n_basic_blocks)
6828 {
6829 int len;
6830
6831 if (prologue)
6832 {
6833 register rtx insn, note = 0;
6834
6835 /* Scan from the beginning until we reach the last prologue insn.
6836 We apparently can't depend on basic_block_{head,end} after
6837 reorg has run. */
6838 for (len = 0; prologue[len]; len++)
6839 ;
6840 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6841 {
6842 if (GET_CODE (insn) == NOTE)
6843 {
6844 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6845 note = insn;
6846 }
6847 else if ((len -= contains (insn, prologue)) == 0)
6848 {
6849 rtx next;
6850 /* Find the prologue-end note if we haven't already, and
6851 move it to just after the last prologue insn. */
6852 if (note == 0)
6853 {
6854 for (note = insn; (note = NEXT_INSN (note));)
6855 if (GET_CODE (note) == NOTE
6856 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6857 break;
6858 }
6859
6860 next = NEXT_INSN (note);
6861
6862 /* Whether or not we can depend on BLOCK_HEAD,
6863 attempt to keep it up-to-date. */
6864 if (BLOCK_HEAD (0) == note)
6865 BLOCK_HEAD (0) = next;
6866
6867 remove_insn (note);
6868 add_insn_after (note, insn);
6869 }
6870 }
6871 }
6872
6873 if (epilogue)
6874 {
6875 register rtx insn, note = 0;
6876
6877 /* Scan from the end until we reach the first epilogue insn.
6878 We apparently can't depend on basic_block_{head,end} after
6879 reorg has run. */
6880 for (len = 0; epilogue[len]; len++)
6881 ;
6882 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6883 {
6884 if (GET_CODE (insn) == NOTE)
6885 {
6886 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6887 note = insn;
6888 }
6889 else if ((len -= contains (insn, epilogue)) == 0)
6890 {
6891 /* Find the epilogue-begin note if we haven't already, and
6892 move it to just before the first epilogue insn. */
6893 if (note == 0)
6894 {
6895 for (note = insn; (note = PREV_INSN (note));)
6896 if (GET_CODE (note) == NOTE
6897 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6898 break;
6899 }
6900
6901 /* Whether or not we can depend on BLOCK_HEAD,
6902 attempt to keep it up-to-date. */
6903 if (n_basic_blocks
6904 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6905 BLOCK_HEAD (n_basic_blocks-1) = note;
6906
6907 remove_insn (note);
6908 add_insn_before (note, insn);
6909 }
6910 }
6911 }
6912 }
6913 #endif /* HAVE_prologue or HAVE_epilogue */
6914 }
6915
6916 /* Mark T for GC. */
6917
6918 static void
6919 mark_temp_slot (t)
6920 struct temp_slot *t;
6921 {
6922 while (t)
6923 {
6924 ggc_mark_rtx (t->slot);
6925 ggc_mark_rtx (t->address);
6926 ggc_mark_tree (t->rtl_expr);
6927
6928 t = t->next;
6929 }
6930 }
6931
6932 /* Mark P for GC. */
6933
6934 static void
6935 mark_function_status (p)
6936 struct function *p;
6937 {
6938 int i;
6939 rtx *r;
6940
6941 if (p == 0)
6942 return;
6943
6944 ggc_mark_rtx (p->arg_offset_rtx);
6945
6946 if (p->x_parm_reg_stack_loc)
6947 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6948 i > 0; --i, ++r)
6949 ggc_mark_rtx (*r);
6950
6951 ggc_mark_rtx (p->return_rtx);
6952 ggc_mark_rtx (p->x_cleanup_label);
6953 ggc_mark_rtx (p->x_return_label);
6954 ggc_mark_rtx (p->x_save_expr_regs);
6955 ggc_mark_rtx (p->x_stack_slot_list);
6956 ggc_mark_rtx (p->x_parm_birth_insn);
6957 ggc_mark_rtx (p->x_tail_recursion_label);
6958 ggc_mark_rtx (p->x_tail_recursion_reentry);
6959 ggc_mark_rtx (p->internal_arg_pointer);
6960 ggc_mark_rtx (p->x_arg_pointer_save_area);
6961 ggc_mark_tree (p->x_rtl_expr_chain);
6962 ggc_mark_rtx (p->x_last_parm_insn);
6963 ggc_mark_tree (p->x_context_display);
6964 ggc_mark_tree (p->x_trampoline_list);
6965 ggc_mark_rtx (p->epilogue_delay_list);
6966
6967 mark_temp_slot (p->x_temp_slots);
6968
6969 {
6970 struct var_refs_queue *q = p->fixup_var_refs_queue;
6971 while (q)
6972 {
6973 ggc_mark_rtx (q->modified);
6974 q = q->next;
6975 }
6976 }
6977
6978 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
6979 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
6980 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6981 ggc_mark_tree (p->x_nonlocal_labels);
6982 }
6983
6984 /* Mark the function chain ARG (which is really a struct function **)
6985 for GC. */
6986
6987 static void
6988 mark_function_chain (arg)
6989 void *arg;
6990 {
6991 struct function *f = *(struct function **) arg;
6992
6993 for (; f; f = f->next_global)
6994 {
6995 ggc_mark_tree (f->decl);
6996
6997 mark_function_status (f);
6998 mark_eh_status (f->eh);
6999 mark_stmt_status (f->stmt);
7000 mark_expr_status (f->expr);
7001 mark_emit_status (f->emit);
7002 mark_varasm_status (f->varasm);
7003
7004 if (mark_machine_status)
7005 (*mark_machine_status) (f);
7006 if (mark_lang_status)
7007 (*mark_lang_status) (f);
7008
7009 if (f->original_arg_vector)
7010 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7011 if (f->original_decl_initial)
7012 ggc_mark_tree (f->original_decl_initial);
7013 }
7014 }
7015
7016 /* Called once, at initialization, to initialize function.c. */
7017
7018 void
7019 init_function_once ()
7020 {
7021 ggc_add_root (&all_functions, 1, sizeof all_functions,
7022 mark_function_chain);
7023 }