1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg,
39 then scans all the RTL instructions so far generated to correct them. */
40
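/* The sketch below is illustrative only and is not part of function.c:
   it shows the rough order in which a language front end drives the
   entry points described above when expanding one function.  The exact
   signatures and the extra arguments (source position, cleanups) vary
   between front ends and compiler versions, so treat every call here as
   an assumption rather than a definitive interface.  */
#if 0
static void
expand_one_function_sketch (fndecl)
     tree fndecl;
{
  /* Prepare per-function state before the body is parsed.  */
  init_function_start (fndecl, input_filename, lineno);

  /* Emit the prologue-time RTL: parameter homes, static chain, etc.  */
  expand_function_start (fndecl, /* parms_have_cleanups */ 0);

  /* ... the front end expands the statements of the body here,
     calling assign_stack_local / put_var_into_stack as needed ...  */

  /* Emit the epilogue-time RTL and the return sequence.  */
  expand_function_end (input_filename, lineno, /* end_bindings */ 0);
}
#endif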
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62
63 #ifndef TRAMPOLINE_ALIGNMENT
64 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
65 #endif
66
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
70
71 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
72 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
73 #endif
74
75 /* Some systems use __main in a way incompatible with its use in gcc; in these
76 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
77 give the same symbol without quotes for an alternative entry point. You
78 must define both, or neither. */
79 #ifndef NAME__MAIN
80 #define NAME__MAIN "__main"
81 #define SYMBOL__MAIN __main
82 #endif
83
84 /* Round a value down to the largest multiple of the required alignment
85 that does not exceed it. Avoid using division in case the value is
86 negative. Assume the alignment is a power of two. */
87 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
88
89 /* Similar, but round a value up to the smallest multiple of the
90 alignment that is not less than it. */
91 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
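/* Worked example (not part of the original source): how the two rounding
   macros behave for a negative frame offset, which is why they use masking
   instead of division.  The function below is hypothetical and exists only
   to show the arithmetic.  */
#if 0
static void
alignment_rounding_sketch ()
{
  HOST_WIDE_INT offset = -13;

  /* Round toward more negative values: -13 & ~7 == -16.  */
  HOST_WIDE_INT down = FLOOR_ROUND (offset, 8);

  /* Round toward more positive values: (-13 + 7) & ~7 == -8.  */
  HOST_WIDE_INT up = CEIL_ROUND (offset, 8);
}
#endif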
92
93 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
94 during rtl generation. If they are different register numbers, this is
95 always true. It may also be true if
96 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
97 generation. See fix_lexical_addr for details. */
98
99 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
100 #define NEED_SEPARATE_AP
101 #endif
102
103 /* Nonzero if function being compiled doesn't contain any calls
104 (ignoring the prologue and epilogue). This is set prior to
105 local register allocation and is valid for the remaining
106 compiler passes. */
107 int current_function_is_leaf;
108
109 /* Nonzero if function being compiled doesn't modify the stack pointer
110 (ignoring the prologue and epilogue). This is only valid after
111 life_analysis has run. */
112 int current_function_sp_is_unchanging;
113
114 /* Nonzero if the function being compiled is a leaf function which only
115 uses leaf registers. This is valid after reload (specifically after
116 sched2) and is useful only if the port defines LEAF_REGISTERS. */
117 int current_function_uses_only_leaf_regs;
118
119 /* Nonzero once virtual register instantiation has been done.
120 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
121 static int virtuals_instantiated;
122
123 /* These variables hold pointers to functions to
124 save and restore machine-specific data,
125 in push_function_context and pop_function_context. */
126 void (*init_machine_status) PARAMS ((struct function *));
127 void (*save_machine_status) PARAMS ((struct function *));
128 void (*restore_machine_status) PARAMS ((struct function *));
129 void (*mark_machine_status) PARAMS ((struct function *));
130 void (*free_machine_status) PARAMS ((struct function *));
131
132 /* Likewise, but for language-specific data. */
133 void (*init_lang_status) PARAMS ((struct function *));
134 void (*save_lang_status) PARAMS ((struct function *));
135 void (*restore_lang_status) PARAMS ((struct function *));
136 void (*mark_lang_status) PARAMS ((struct function *));
137 void (*free_lang_status) PARAMS ((struct function *));
138
139 /* The FUNCTION_DECL for an inline function currently being expanded. */
140 tree inline_function_decl;
141
142 /* The currently compiled function. */
143 struct function *cfun = 0;
144
145 /* Global list of all compiled functions. */
146 struct function *all_functions = 0;
147
148 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
149 static int *prologue;
150 static int *epilogue;
151 \f
152 /* In order to evaluate some expressions, such as function calls returning
153 structures in memory, we need to temporarily allocate stack locations.
154 We record each allocated temporary in the following structure.
155
156 Associated with each temporary slot is a nesting level. When we pop up
157 one level, all temporaries associated with the previous level are freed.
158 Normally, all temporaries are freed after the execution of the statement
159 in which they were created. However, if we are inside a ({...}) grouping,
160 the result may be in a temporary and hence must be preserved. If the
161 result could be in a temporary, we preserve it if we can determine which
162 one it is in. If we cannot determine which temporary may contain the
163 result, all temporaries are preserved. A temporary is preserved by
164 pretending it was allocated at the previous nesting level.
165
166 Automatic variables are also assigned temporary slots, at the nesting
167 level where they are defined. They are marked as "kept" so that
168 free_temp_slots will not free them. */
169
170 struct temp_slot
171 {
172 /* Points to next temporary slot. */
173 struct temp_slot *next;
174 /* The rtx used to reference the slot. */
175 rtx slot;
176 /* The rtx used to represent the address if not the address of the
177 slot above. May be an EXPR_LIST if multiple addresses exist. */
178 rtx address;
179 /* The alignment (in bits) of the slot. */
180 int align;
181 /* The size, in units, of the slot. */
182 HOST_WIDE_INT size;
183 /* The alias set for the slot. If the alias set is zero, we don't
184 know anything about the alias set of the slot. We must only
185 reuse a slot if it is assigned an object of the same alias set.
186 Otherwise, the rest of the compiler may assume that the new use
187 of the slot cannot alias the old use of the slot, which is
188 false. If the slot has alias set zero, then we can't reuse the
189 slot at all, since we have no idea what alias set may have been
190 imposed on the memory. For example, if the stack slot is the
191 call frame for an inlined function, we have no idea what alias
192 sets will be assigned to various pieces of the call frame. */
193 int alias_set;
194 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
195 tree rtl_expr;
196 /* Non-zero if this temporary is currently in use. */
197 char in_use;
198 /* Non-zero if this temporary has its address taken. */
199 char addr_taken;
200 /* Nesting level at which this slot is being used. */
201 int level;
202 /* Non-zero if this should survive a call to free_temp_slots. */
203 int keep;
204 /* The offset of the slot from the frame_pointer, including extra space
205 for alignment. This info is for combine_temp_slots. */
206 HOST_WIDE_INT base_offset;
207 /* The size of the slot, including extra space for alignment. This
208 info is for combine_temp_slots. */
209 HOST_WIDE_INT full_size;
210 };
211 \f
212 /* This structure is used to record MEMs or pseudos used to replace VAR, any
213 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
214 maintain this list in case two operands of an insn were required to match;
215 in that case we must ensure we use the same replacement. */
216
217 struct fixup_replacement
218 {
219 rtx old;
220 rtx new;
221 struct fixup_replacement *next;
222 };
223
224 struct insns_for_mem_entry {
225 /* The KEY in HE will be a MEM. */
226 struct hash_entry he;
227 /* These are the INSNS which reference the MEM. */
228 rtx insns;
229 };
230
231 /* Forward declarations. */
232
233 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
234 int, struct function *));
235 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
236 HOST_WIDE_INT, int, tree));
237 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
238 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
239 enum machine_mode, enum machine_mode,
240 int, int, int, struct hash_table *));
241 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
242 struct hash_table *));
243 static struct fixup_replacement
244 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
245 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
246 rtx, int, struct hash_table *));
247 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
248 struct fixup_replacement **));
249 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
250 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
251 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
252 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
253 static void instantiate_decls PARAMS ((tree, int));
254 static void instantiate_decls_1 PARAMS ((tree, int));
255 static void instantiate_decl PARAMS ((rtx, int, int));
256 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
257 static void delete_handlers PARAMS ((void));
258 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
259 struct args_size *));
260 #ifndef ARGS_GROW_DOWNWARD
261 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
262 tree));
263 #endif
264 #ifdef ARGS_GROW_DOWNWARD
265 static tree round_down PARAMS ((tree, int));
266 #endif
267 static rtx round_trampoline_addr PARAMS ((rtx));
268 static tree blocks_nreverse PARAMS ((tree));
269 static int all_blocks PARAMS ((tree, tree *));
270 /* We always define `record_insns' even if it's not used so that we
271 can always export `prologue_epilogue_contains'. */
272 static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
273 static int contains PARAMS ((rtx, int *));
274 #ifdef HAVE_return
275 static void emit_return_into_block PARAMS ((basic_block));
276 #endif
277 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
278 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
279 struct hash_table *));
280 static int is_addressof PARAMS ((rtx *, void *));
281 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
282 struct hash_table *,
283 hash_table_key));
284 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
285 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
286 static int insns_for_mem_walk PARAMS ((rtx *, void *));
287 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
288 static void mark_temp_slot PARAMS ((struct temp_slot *));
289 static void mark_function_status PARAMS ((struct function *));
290 static void mark_function_chain PARAMS ((void *));
291 static void prepare_function_start PARAMS ((void));
292 static void do_clobber_return_reg PARAMS ((rtx, void *));
293 static void do_use_return_reg PARAMS ((rtx, void *));
294 \f
295 /* Pointer to chain of `struct function' for containing functions. */
296 struct function *outer_function_chain;
297
298 /* Given a function decl for a containing function,
299 return the `struct function' for it. */
300
301 struct function *
302 find_function_data (decl)
303 tree decl;
304 {
305 struct function *p;
306
307 for (p = outer_function_chain; p; p = p->next)
308 if (p->decl == decl)
309 return p;
310
311 abort ();
312 }
313
314 /* Save the current context for compilation of a nested function.
315 This is called from language-specific code. The caller should use
316 the save_lang_status callback to save any language-specific state,
317 since this function knows only about language-independent
318 variables. */
319
320 void
321 push_function_context_to (context)
322 tree context;
323 {
324 struct function *p, *context_data;
325
326 if (context)
327 {
328 context_data = (context == current_function_decl
329 ? cfun
330 : find_function_data (context));
331 context_data->contains_functions = 1;
332 }
333
334 if (cfun == 0)
335 init_dummy_function_start ();
336 p = cfun;
337
338 p->next = outer_function_chain;
339 outer_function_chain = p;
340 p->fixup_var_refs_queue = 0;
341
342 save_tree_status (p);
343 if (save_lang_status)
344 (*save_lang_status) (p);
345 if (save_machine_status)
346 (*save_machine_status) (p);
347
348 cfun = 0;
349 }
350
351 void
352 push_function_context ()
353 {
354 push_function_context_to (current_function_decl);
355 }
356
357 /* Restore the last saved context, at the end of a nested function.
358 This function is called from language-specific code. */
359
360 void
361 pop_function_context_from (context)
362 tree context ATTRIBUTE_UNUSED;
363 {
364 struct function *p = outer_function_chain;
365 struct var_refs_queue *queue;
366 struct var_refs_queue *next;
367
368 cfun = p;
369 outer_function_chain = p->next;
370
371 current_function_decl = p->decl;
372 reg_renumber = 0;
373
374 restore_tree_status (p);
375 restore_emit_status (p);
376
377 if (restore_machine_status)
378 (*restore_machine_status) (p);
379 if (restore_lang_status)
380 (*restore_lang_status) (p);
381
382 /* Finish doing put_var_into_stack for any of our variables
383 which became addressable during the nested function. */
384 for (queue = p->fixup_var_refs_queue; queue; queue = next)
385 {
386 next = queue->next;
387 fixup_var_refs (queue->modified, queue->promoted_mode,
388 queue->unsignedp, 0);
389 free (queue);
390 }
391 p->fixup_var_refs_queue = 0;
392
393 /* Reset variables that have known state during rtx generation. */
394 rtx_equal_function_value_matters = 1;
395 virtuals_instantiated = 0;
396 }
397
398 void
399 pop_function_context ()
400 {
401 pop_function_context_from (current_function_decl);
402 }
403
404 /* Clear out all parts of the state in F that can safely be discarded
405 after the function has been parsed, but not compiled, to let
406 garbage collection reclaim the memory. */
407
408 void
409 free_after_parsing (f)
410 struct function *f;
411 {
412 /* f->expr->forced_labels is used by code generation. */
413 /* f->emit->regno_reg_rtx is used by code generation. */
414 /* f->varasm is used by code generation. */
415 /* f->eh->eh_return_stub_label is used by code generation. */
416
417 if (free_lang_status)
418 (*free_lang_status) (f);
419 free_stmt_status (f);
420 }
421
422 /* Clear out all parts of the state in F that can safely be discarded
423 after the function has been compiled, to let garbage collection
424 reclaim the memory. */
425
426 void
427 free_after_compilation (f)
428 struct function *f;
429 {
430 free_eh_status (f);
431 free_expr_status (f);
432 free_emit_status (f);
433 free_varasm_status (f);
434
435 if (free_machine_status)
436 (*free_machine_status) (f);
437
438 if (f->x_parm_reg_stack_loc)
439 free (f->x_parm_reg_stack_loc);
440
441 f->arg_offset_rtx = NULL;
442 f->return_rtx = NULL;
443 f->internal_arg_pointer = NULL;
444 f->x_nonlocal_labels = NULL;
445 f->x_nonlocal_goto_handler_slots = NULL;
446 f->x_nonlocal_goto_handler_labels = NULL;
447 f->x_nonlocal_goto_stack_level = NULL;
448 f->x_cleanup_label = NULL;
449 f->x_return_label = NULL;
450 f->x_save_expr_regs = NULL;
451 f->x_stack_slot_list = NULL;
452 f->x_rtl_expr_chain = NULL;
453 f->x_tail_recursion_label = NULL;
454 f->x_tail_recursion_reentry = NULL;
455 f->x_arg_pointer_save_area = NULL;
456 f->x_context_display = NULL;
457 f->x_trampoline_list = NULL;
458 f->x_parm_birth_insn = NULL;
459 f->x_last_parm_insn = NULL;
460 f->x_parm_reg_stack_loc = NULL;
461 f->x_temp_slots = NULL;
462 f->fixup_var_refs_queue = NULL;
463 f->original_arg_vector = NULL;
464 f->original_decl_initial = NULL;
465 f->inl_last_parm_insn = NULL;
466 f->epilogue_delay_list = NULL;
467 }
468
469 \f
470 /* Allocate fixed slots in the stack frame of the current function. */
471
472 /* Return size needed for stack frame based on slots so far allocated in
473 function F.
474 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
475 the caller may have to do that. */
476
477 HOST_WIDE_INT
478 get_func_frame_size (f)
479 struct function *f;
480 {
481 #ifdef FRAME_GROWS_DOWNWARD
482 return -f->x_frame_offset;
483 #else
484 return f->x_frame_offset;
485 #endif
486 }
487
488 /* Return size needed for stack frame based on slots so far allocated.
489 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
490 the caller may have to do that. */
491 HOST_WIDE_INT
492 get_frame_size ()
493 {
494 return get_func_frame_size (cfun);
495 }
496
497 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
498 with machine mode MODE.
499
500 ALIGN controls the amount of alignment for the address of the slot:
501 0 means according to MODE,
502 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
503 positive specifies alignment boundary in bits.
504
505 We do not round to stack_boundary here.
506
507 FUNCTION specifies the function to allocate in. */
508
509 static rtx
510 assign_stack_local_1 (mode, size, align, function)
511 enum machine_mode mode;
512 HOST_WIDE_INT size;
513 int align;
514 struct function *function;
515 {
516 register rtx x, addr;
517 int bigend_correction = 0;
518 int alignment;
519
520 /* Allocate in the memory associated with the function in whose frame
521 we are assigning. */
522 if (function != cfun)
523 push_obstacks (function->function_obstack,
524 function->function_maybepermanent_obstack);
525
526 if (align == 0)
527 {
528 tree type;
529
530 alignment = GET_MODE_ALIGNMENT (mode);
531 if (mode == BLKmode)
532 alignment = BIGGEST_ALIGNMENT;
533
534 /* Allow the target to (possibly) increase the alignment of this
535 stack slot. */
536 type = type_for_mode (mode, 0);
537 if (type)
538 alignment = LOCAL_ALIGNMENT (type, alignment);
539
540 alignment /= BITS_PER_UNIT;
541 }
542 else if (align == -1)
543 {
544 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
545 size = CEIL_ROUND (size, alignment);
546 }
547 else
548 alignment = align / BITS_PER_UNIT;
549
550 #ifdef FRAME_GROWS_DOWNWARD
551 function->x_frame_offset -= size;
552 #endif
553
554 /* Ignore alignment we can't satisfy given the preferred stack boundary. */
555 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
556 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
557
558 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
559 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
560
561 /* Round frame offset to that alignment.
562 We must be careful here, since FRAME_OFFSET might be negative and
563 division with a negative dividend isn't as well defined as we might
564 like. So we instead assume that ALIGNMENT is a power of two and
565 use logical operations which are unambiguous. */
566 #ifdef FRAME_GROWS_DOWNWARD
567 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
568 #else
569 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
570 #endif
571
572 /* On a big-endian machine, if we are allocating more space than we will use,
573 use the least significant bytes of those that are allocated. */
574 if (BYTES_BIG_ENDIAN && mode != BLKmode)
575 bigend_correction = size - GET_MODE_SIZE (mode);
576
577 /* If we have already instantiated virtual registers, return the actual
578 address relative to the frame pointer. */
579 if (function == cfun && virtuals_instantiated)
580 addr = plus_constant (frame_pointer_rtx,
581 (frame_offset + bigend_correction
582 + STARTING_FRAME_OFFSET));
583 else
584 addr = plus_constant (virtual_stack_vars_rtx,
585 function->x_frame_offset + bigend_correction);
586
587 #ifndef FRAME_GROWS_DOWNWARD
588 function->x_frame_offset += size;
589 #endif
590
591 x = gen_rtx_MEM (mode, addr);
592
593 function->x_stack_slot_list
594 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
595
596 if (function != cfun)
597 pop_obstacks ();
598
599 return x;
600 }
601
602 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
603 current function. */
604 rtx
605 assign_stack_local (mode, size, align)
606 enum machine_mode mode;
607 HOST_WIDE_INT size;
608 int align;
609 {
610 return assign_stack_local_1 (mode, size, align, cfun);
611 }
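/* Usage sketch (not part of the original source): the three forms of the
   ALIGN argument documented above.  The sizes and modes are arbitrary
   examples.  */
#if 0
static void
assign_stack_local_sketch ()
{
  /* ALIGN == 0: alignment is taken from the mode (here SImode).  */
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

  /* ALIGN == -1: use BIGGEST_ALIGNMENT and round the size up to a
     multiple of it.  */
  rtx b = assign_stack_local (BLKmode, 32, -1);

  /* ALIGN > 0: explicit alignment request, in bits.  */
  rtx c = assign_stack_local (BLKmode, 16, 64);
}
#endif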
612 \f
613 /* Allocate a temporary stack slot and record it for possible later
614 reuse.
615
616 MODE is the machine mode to be given to the returned rtx.
617
618 SIZE is the size in units of the space required. We do no rounding here
619 since assign_stack_local will do any required rounding.
620
621 KEEP is 1 if this slot is to be retained after a call to
622 free_temp_slots. Automatic variables for a block are allocated
623 with this flag. KEEP is 2 if we allocate a longer term temporary,
624 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
625 if we are to allocate something at an inner level to be treated as
626 a variable in the block (e.g., a SAVE_EXPR).
627
628 TYPE is the type that will be used for the stack slot. */
629
630 static rtx
631 assign_stack_temp_for_type (mode, size, keep, type)
632 enum machine_mode mode;
633 HOST_WIDE_INT size;
634 int keep;
635 tree type;
636 {
637 int align;
638 int alias_set;
639 struct temp_slot *p, *best_p = 0;
640
641 /* If SIZE is -1 it means that somebody tried to allocate a temporary
642 of a variable size. */
643 if (size == -1)
644 abort ();
645
646 /* If we know the alias set for the memory that will be used, use
647 it. If there's no TYPE, then we don't know anything about the
648 alias set for the memory. */
649 if (type)
650 alias_set = get_alias_set (type);
651 else
652 alias_set = 0;
653
654 align = GET_MODE_ALIGNMENT (mode);
655 if (mode == BLKmode)
656 align = BIGGEST_ALIGNMENT;
657
658 if (! type)
659 type = type_for_mode (mode, 0);
660 if (type)
661 align = LOCAL_ALIGNMENT (type, align);
662
663 /* Try to find an available, already-allocated temporary of the proper
664 mode which meets the size and alignment requirements. Choose the
665 smallest one with the closest alignment. */
666 for (p = temp_slots; p; p = p->next)
667 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
668 && ! p->in_use
669 && (!flag_strict_aliasing
670 || (alias_set && p->alias_set == alias_set))
671 && (best_p == 0 || best_p->size > p->size
672 || (best_p->size == p->size && best_p->align > p->align)))
673 {
674 if (p->align == align && p->size == size)
675 {
676 best_p = 0;
677 break;
678 }
679 best_p = p;
680 }
681
682 /* Make our best, if any, the one to use. */
683 if (best_p)
684 {
685 /* If there are enough aligned bytes left over, make them into a new
686 temp_slot so that the extra bytes don't get wasted. Do this only
687 for BLKmode slots, so that we can be sure of the alignment. */
688 if (GET_MODE (best_p->slot) == BLKmode
689 /* We can't split slots if -fstrict-aliasing because the
690 information about the alias set for the new slot will be
691 lost. */
692 && !flag_strict_aliasing)
693 {
694 int alignment = best_p->align / BITS_PER_UNIT;
695 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
696
697 if (best_p->size - rounded_size >= alignment)
698 {
699 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
700 p->in_use = p->addr_taken = 0;
701 p->size = best_p->size - rounded_size;
702 p->base_offset = best_p->base_offset + rounded_size;
703 p->full_size = best_p->full_size - rounded_size;
704 p->slot = gen_rtx_MEM (BLKmode,
705 plus_constant (XEXP (best_p->slot, 0),
706 rounded_size));
707 p->align = best_p->align;
708 p->address = 0;
709 p->rtl_expr = 0;
710 p->next = temp_slots;
711 temp_slots = p;
712
713 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
714 stack_slot_list);
715
716 best_p->size = rounded_size;
717 best_p->full_size = rounded_size;
718 }
719 }
720
721 p = best_p;
722 }
723
724 /* If we still didn't find one, make a new temporary. */
725 if (p == 0)
726 {
727 HOST_WIDE_INT frame_offset_old = frame_offset;
728
729 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
730
731 /* We are passing an explicit alignment request to assign_stack_local.
732 One side effect of that is assign_stack_local will not round SIZE
733 to ensure the frame offset remains suitably aligned.
734
735 So for requests which depended on the rounding of SIZE, we go ahead
736 and round it now. We also make sure ALIGNMENT is at least
737 BIGGEST_ALIGNMENT. */
738 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
739 abort ();
740 p->slot = assign_stack_local (mode,
741 (mode == BLKmode
742 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
743 : size),
744 align);
745
746 p->align = align;
747 p->alias_set = alias_set;
748
749 /* The following slot size computation is necessary because we don't
750 know the actual size of the temporary slot until assign_stack_local
751 has performed all the frame alignment and size rounding for the
752 requested temporary. Note that extra space added for alignment
753 can be either above or below this stack slot depending on which
754 way the frame grows. We include the extra space if and only if it
755 is above this slot. */
756 #ifdef FRAME_GROWS_DOWNWARD
757 p->size = frame_offset_old - frame_offset;
758 #else
759 p->size = size;
760 #endif
761
762 /* Now define the fields used by combine_temp_slots. */
763 #ifdef FRAME_GROWS_DOWNWARD
764 p->base_offset = frame_offset;
765 p->full_size = frame_offset_old - frame_offset;
766 #else
767 p->base_offset = frame_offset_old;
768 p->full_size = frame_offset - frame_offset_old;
769 #endif
770 p->address = 0;
771 p->next = temp_slots;
772 temp_slots = p;
773 }
774
775 p->in_use = 1;
776 p->addr_taken = 0;
777 p->rtl_expr = seq_rtl_expr;
778
779 if (keep == 2)
780 {
781 p->level = target_temp_slot_level;
782 p->keep = 0;
783 }
784 else if (keep == 3)
785 {
786 p->level = var_temp_slot_level;
787 p->keep = 0;
788 }
789 else
790 {
791 p->level = temp_slot_level;
792 p->keep = keep;
793 }
794
795 /* We may be reusing an old slot, so clear any MEM flags that may have been
796 set from before. */
797 RTX_UNCHANGING_P (p->slot) = 0;
798 MEM_IN_STRUCT_P (p->slot) = 0;
799 MEM_SCALAR_P (p->slot) = 0;
800 MEM_ALIAS_SET (p->slot) = 0;
801 return p->slot;
802 }
803
804 /* Allocate a temporary stack slot and record it for possible later
805 reuse. First three arguments are same as in preceding function. */
806
807 rtx
808 assign_stack_temp (mode, size, keep)
809 enum machine_mode mode;
810 HOST_WIDE_INT size;
811 int keep;
812 {
813 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
814 }
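/* Usage sketch (not part of the original source): how the temporary-slot
   allocator is typically paired with the nesting-level functions defined
   later in this file.  The mode chosen here is arbitrary.  */
#if 0
static void
temp_slot_sketch ()
{
  rtx tmp;

  push_temp_slots ();

  /* KEEP == 0: the slot may be reclaimed by free_temp_slots at the end
     of the statement that needed it.  */
  tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

  /* ... emit RTL that stores into and reads from TMP ...  */

  free_temp_slots ();
  pop_temp_slots ();
}
#endif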
815 \f
816 /* Assign a temporary of given TYPE.
817 KEEP is as for assign_stack_temp.
818 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
819 it is 0 if a register is OK.
820 DONT_PROMOTE is 1 if we should not promote values in register
821 to wider modes. */
822
823 rtx
824 assign_temp (type, keep, memory_required, dont_promote)
825 tree type;
826 int keep;
827 int memory_required;
828 int dont_promote ATTRIBUTE_UNUSED;
829 {
830 enum machine_mode mode = TYPE_MODE (type);
831 #ifndef PROMOTE_FOR_CALL_ONLY
832 int unsignedp = TREE_UNSIGNED (type);
833 #endif
834
835 if (mode == BLKmode || memory_required)
836 {
837 HOST_WIDE_INT size = int_size_in_bytes (type);
838 rtx tmp;
839
840 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
841 problems with allocating the stack space. */
842 if (size == 0)
843 size = 1;
844
845 /* Unfortunately, we don't yet know how to allocate variable-sized
846 temporaries. However, sometimes we have a fixed upper limit on
847 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
848 instead. This is the case for Chill variable-sized strings. */
849 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
850 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
851 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
852 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
853
854 tmp = assign_stack_temp_for_type (mode, size, keep, type);
855 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
856 return tmp;
857 }
858
859 #ifndef PROMOTE_FOR_CALL_ONLY
860 if (! dont_promote)
861 mode = promote_mode (type, mode, &unsignedp, 0);
862 #endif
863
864 return gen_reg_rtx (mode);
865 }
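/* Usage sketch (not part of the original source): assign_temp with a tree
   type, as a front end might use it for a compiler-generated temporary.
   TYPE is assumed to be some already-built tree type node.  */
#if 0
static rtx
assign_temp_sketch (type)
     tree type;
{
  /* KEEP == 0, a register is acceptable (memory_required == 0), and
     promotion to a wider mode is allowed (dont_promote == 0).  */
  return assign_temp (type, 0, 0, 0);
}
#endif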
866 \f
867 /* Combine temporary stack slots which are adjacent on the stack.
868
869 This allows for better use of already allocated stack space. This is only
870 done for BLKmode slots because we can be sure that we won't have alignment
871 problems in this case. */
872
873 void
874 combine_temp_slots ()
875 {
876 struct temp_slot *p, *q;
877 struct temp_slot *prev_p, *prev_q;
878 int num_slots;
879
880 /* We can't combine slots, because the information about which slot
881 is in which alias set will be lost. */
882 if (flag_strict_aliasing)
883 return;
884
885 /* If there are a lot of temp slots, don't do anything unless
886 high levels of optimization are enabled. */
887 if (! flag_expensive_optimizations)
888 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
889 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
890 return;
891
892 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
893 {
894 int delete_p = 0;
895
896 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
897 for (q = p->next, prev_q = p; q; q = prev_q->next)
898 {
899 int delete_q = 0;
900 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
901 {
902 if (p->base_offset + p->full_size == q->base_offset)
903 {
904 /* Q comes after P; combine Q into P. */
905 p->size += q->size;
906 p->full_size += q->full_size;
907 delete_q = 1;
908 }
909 else if (q->base_offset + q->full_size == p->base_offset)
910 {
911 /* P comes after Q; combine P into Q. */
912 q->size += p->size;
913 q->full_size += p->full_size;
914 delete_p = 1;
915 break;
916 }
917 }
918 /* Either delete Q or advance past it. */
919 if (delete_q)
920 prev_q->next = q->next;
921 else
922 prev_q = q;
923 }
924 /* Either delete P or advance past it. */
925 if (delete_p)
926 {
927 if (prev_p)
928 prev_p->next = p->next;
929 else
930 temp_slots = p->next;
931 }
932 else
933 prev_p = p;
934 }
935 }
936 \f
937 /* Find the temp slot corresponding to the object at address X. */
938
939 static struct temp_slot *
940 find_temp_slot_from_address (x)
941 rtx x;
942 {
943 struct temp_slot *p;
944 rtx next;
945
946 for (p = temp_slots; p; p = p->next)
947 {
948 if (! p->in_use)
949 continue;
950
951 else if (XEXP (p->slot, 0) == x
952 || p->address == x
953 || (GET_CODE (x) == PLUS
954 && XEXP (x, 0) == virtual_stack_vars_rtx
955 && GET_CODE (XEXP (x, 1)) == CONST_INT
956 && INTVAL (XEXP (x, 1)) >= p->base_offset
957 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
958 return p;
959
960 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
961 for (next = p->address; next; next = XEXP (next, 1))
962 if (XEXP (next, 0) == x)
963 return p;
964 }
965
966 /* If we have a sum involving a register, see if it points to a temp
967 slot. */
968 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
969 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
970 return p;
971 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
972 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
973 return p;
974
975 return 0;
976 }
977
978 /* Indicate that NEW is an alternate way of referring to the temp slot
979 that previously was known by OLD. */
980
981 void
982 update_temp_slot_address (old, new)
983 rtx old, new;
984 {
985 struct temp_slot *p;
986
987 if (rtx_equal_p (old, new))
988 return;
989
990 p = find_temp_slot_from_address (old);
991
992 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
993 is a register, see if one operand of the PLUS is a temporary
994 location. If so, NEW points into it. Otherwise, if both OLD and
995 NEW are a PLUS and they have an operand in common, try a
996 recursive call on the remaining operands. */
997 if (p == 0)
998 {
999 if (GET_CODE (old) != PLUS)
1000 return;
1001
1002 if (GET_CODE (new) == REG)
1003 {
1004 update_temp_slot_address (XEXP (old, 0), new);
1005 update_temp_slot_address (XEXP (old, 1), new);
1006 return;
1007 }
1008 else if (GET_CODE (new) != PLUS)
1009 return;
1010
1011 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1012 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1013 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1014 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1015 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1016 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1017 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1018 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1019
1020 return;
1021 }
1022
1023 /* Otherwise add an alias for the temp's address. */
1024 else if (p->address == 0)
1025 p->address = new;
1026 else
1027 {
1028 if (GET_CODE (p->address) != EXPR_LIST)
1029 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1030
1031 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1032 }
1033 }
1034
1035 /* If X could be a reference to a temporary slot, mark the fact that its
1036 address was taken. */
1037
1038 void
1039 mark_temp_addr_taken (x)
1040 rtx x;
1041 {
1042 struct temp_slot *p;
1043
1044 if (x == 0)
1045 return;
1046
1047 /* If X is not in memory or is at a constant address, it cannot be in
1048 a temporary slot. */
1049 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1050 return;
1051
1052 p = find_temp_slot_from_address (XEXP (x, 0));
1053 if (p != 0)
1054 p->addr_taken = 1;
1055 }
1056
1057 /* If X could be a reference to a temporary slot, mark that slot as
1058 belonging to one level higher than the current level. If X
1059 matched one of our slots, just mark that one. Otherwise, we can't
1060 easily predict which it is, so upgrade all of them. Kept slots
1061 need not be touched.
1062
1063 This is called when an ({...}) construct occurs and a statement
1064 returns a value in memory. */
1065
1066 void
1067 preserve_temp_slots (x)
1068 rtx x;
1069 {
1070 struct temp_slot *p = 0;
1071
1072 /* If there is no result, we still might have some objects whose addresses
1073 were taken, so we need to make sure they stay around. */
1074 if (x == 0)
1075 {
1076 for (p = temp_slots; p; p = p->next)
1077 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1078 p->level--;
1079
1080 return;
1081 }
1082
1083 /* If X is a register that is being used as a pointer, see if we have
1084 a temporary slot we know it points to. To be consistent with
1085 the code below, we really should preserve all non-kept slots
1086 if we can't find a match, but that seems to be much too costly. */
1087 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1088 p = find_temp_slot_from_address (x);
1089
1090 /* If X is not in memory or is at a constant address, it cannot be in
1091 a temporary slot, but it can contain something whose address was
1092 taken. */
1093 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1094 {
1095 for (p = temp_slots; p; p = p->next)
1096 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1097 p->level--;
1098
1099 return;
1100 }
1101
1102 /* First see if we can find a match. */
1103 if (p == 0)
1104 p = find_temp_slot_from_address (XEXP (x, 0));
1105
1106 if (p != 0)
1107 {
1108 /* Move everything at our level whose address was taken to our new
1109 level in case we used its address. */
1110 struct temp_slot *q;
1111
1112 if (p->level == temp_slot_level)
1113 {
1114 for (q = temp_slots; q; q = q->next)
1115 if (q != p && q->addr_taken && q->level == p->level)
1116 q->level--;
1117
1118 p->level--;
1119 p->addr_taken = 0;
1120 }
1121 return;
1122 }
1123
1124 /* Otherwise, preserve all non-kept slots at this level. */
1125 for (p = temp_slots; p; p = p->next)
1126 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1127 p->level--;
1128 }
1129
1130 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1131 with that RTL_EXPR, promote it into a temporary slot at the present
1132 level so it will not be freed when we free slots made in the
1133 RTL_EXPR. */
1134
1135 void
1136 preserve_rtl_expr_result (x)
1137 rtx x;
1138 {
1139 struct temp_slot *p;
1140
1141 /* If X is not in memory or is at a constant address, it cannot be in
1142 a temporary slot. */
1143 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1144 return;
1145
1146 /* If we can find a match, move it to our level unless it is already at
1147 an upper level. */
1148 p = find_temp_slot_from_address (XEXP (x, 0));
1149 if (p != 0)
1150 {
1151 p->level = MIN (p->level, temp_slot_level);
1152 p->rtl_expr = 0;
1153 }
1154
1155 return;
1156 }
1157
1158 /* Free all temporaries used so far. This is normally called at the end
1159 of generating code for a statement. Don't free any temporaries
1160 currently in use for an RTL_EXPR that hasn't yet been emitted.
1161 We could eventually do better than this since it can be reused while
1162 generating the same RTL_EXPR, but this is complex and probably not
1163 worthwhile. */
1164
1165 void
1166 free_temp_slots ()
1167 {
1168 struct temp_slot *p;
1169
1170 for (p = temp_slots; p; p = p->next)
1171 if (p->in_use && p->level == temp_slot_level && ! p->keep
1172 && p->rtl_expr == 0)
1173 p->in_use = 0;
1174
1175 combine_temp_slots ();
1176 }
1177
1178 /* Free all temporary slots used in T, an RTL_EXPR node. */
1179
1180 void
1181 free_temps_for_rtl_expr (t)
1182 tree t;
1183 {
1184 struct temp_slot *p;
1185
1186 for (p = temp_slots; p; p = p->next)
1187 if (p->rtl_expr == t)
1188 p->in_use = 0;
1189
1190 combine_temp_slots ();
1191 }
1192
1193 /* Mark all temporaries ever allocated in this function as not suitable
1194 for reuse until the current level is exited. */
1195
1196 void
1197 mark_all_temps_used ()
1198 {
1199 struct temp_slot *p;
1200
1201 for (p = temp_slots; p; p = p->next)
1202 {
1203 p->in_use = p->keep = 1;
1204 p->level = MIN (p->level, temp_slot_level);
1205 }
1206 }
1207
1208 /* Push deeper into the nesting level for stack temporaries. */
1209
1210 void
1211 push_temp_slots ()
1212 {
1213 temp_slot_level++;
1214 }
1215
1216 /* Likewise, but save the new level as the place to allocate variables
1217 for blocks. */
1218
1219 #if 0
1220 void
1221 push_temp_slots_for_block ()
1222 {
1223 push_temp_slots ();
1224
1225 var_temp_slot_level = temp_slot_level;
1226 }
1227
1228 /* Likewise, but save the new level as the place to allocate temporaries
1229 for TARGET_EXPRs. */
1230
1231 void
1232 push_temp_slots_for_target ()
1233 {
1234 push_temp_slots ();
1235
1236 target_temp_slot_level = temp_slot_level;
1237 }
1238
1239 /* Set and get the value of target_temp_slot_level. The only
1240 permitted use of these functions is to save and restore this value. */
1241
1242 int
1243 get_target_temp_slot_level ()
1244 {
1245 return target_temp_slot_level;
1246 }
1247
1248 void
1249 set_target_temp_slot_level (level)
1250 int level;
1251 {
1252 target_temp_slot_level = level;
1253 }
1254 #endif
1255
1256 /* Pop a temporary nesting level. All slots in use in the current level
1257 are freed. */
1258
1259 void
1260 pop_temp_slots ()
1261 {
1262 struct temp_slot *p;
1263
1264 for (p = temp_slots; p; p = p->next)
1265 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1266 p->in_use = 0;
1267
1268 combine_temp_slots ();
1269
1270 temp_slot_level--;
1271 }
1272
1273 /* Initialize temporary slots. */
1274
1275 void
1276 init_temp_slots ()
1277 {
1278 /* We have not allocated any temporaries yet. */
1279 temp_slots = 0;
1280 temp_slot_level = 0;
1281 var_temp_slot_level = 0;
1282 target_temp_slot_level = 0;
1283 }
1284 \f
1285 /* Retroactively move an auto variable from a register to a stack slot.
1286 This is done when an address-reference to the variable is seen. */
1287
1288 void
1289 put_var_into_stack (decl)
1290 tree decl;
1291 {
1292 register rtx reg;
1293 enum machine_mode promoted_mode, decl_mode;
1294 struct function *function = 0;
1295 tree context;
1296 int can_use_addressof;
1297
1298 context = decl_function_context (decl);
1299
1300 /* Get the current rtl used for this object and its original mode. */
1301 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1302
1303 /* No need to do anything if decl has no rtx yet
1304 since in that case caller is setting TREE_ADDRESSABLE
1305 and a stack slot will be assigned when the rtl is made. */
1306 if (reg == 0)
1307 return;
1308
1309 /* Get the declared mode for this object. */
1310 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1311 : DECL_MODE (decl));
1312 /* Get the mode it's actually stored in. */
1313 promoted_mode = GET_MODE (reg);
1314
1315 /* If this variable comes from an outer function,
1316 find that function's saved context. */
1317 if (context != current_function_decl && context != inline_function_decl)
1318 for (function = outer_function_chain; function; function = function->next)
1319 if (function->decl == context)
1320 break;
1321
1322 /* If this is a variable-size object with a pseudo to address it,
1323 put that pseudo into the stack, if the var is nonlocal. */
1324 if (DECL_NONLOCAL (decl)
1325 && GET_CODE (reg) == MEM
1326 && GET_CODE (XEXP (reg, 0)) == REG
1327 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1328 {
1329 reg = XEXP (reg, 0);
1330 decl_mode = promoted_mode = GET_MODE (reg);
1331 }
1332
1333 can_use_addressof
1334 = (function == 0
1335 && optimize > 0
1336 /* FIXME make it work for promoted modes too */
1337 && decl_mode == promoted_mode
1338 #ifdef NON_SAVING_SETJMP
1339 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1340 #endif
1341 );
1342
1343 /* If we can't use ADDRESSOF, make sure we see through one we already
1344 generated. */
1345 if (! can_use_addressof && GET_CODE (reg) == MEM
1346 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1347 reg = XEXP (XEXP (reg, 0), 0);
1348
1349 /* Now we should have a value that resides in one or more pseudo regs. */
1350
1351 if (GET_CODE (reg) == REG)
1352 {
1353 /* If this variable lives in the current function and we don't need
1354 to put things in the stack for the sake of setjmp, try to keep it
1355 in a register until we know we actually need the address. */
1356 if (can_use_addressof)
1357 gen_mem_addressof (reg, decl);
1358 else
1359 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1360 promoted_mode, decl_mode,
1361 TREE_SIDE_EFFECTS (decl), 0,
1362 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1363 0);
1364 }
1365 else if (GET_CODE (reg) == CONCAT)
1366 {
1367 /* A CONCAT contains two pseudos; put them both in the stack.
1368 We do it so they end up consecutive. */
1369 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1370 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1371 #ifdef FRAME_GROWS_DOWNWARD
1372 /* Since part 0 should have a lower address, do it second. */
1373 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1374 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1375 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1376 0);
1377 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1378 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1379 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1380 0);
1381 #else
1382 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1383 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1384 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1385 0);
1386 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1387 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1388 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1389 0);
1390 #endif
1391
1392 /* Change the CONCAT into a combined MEM for both parts. */
1393 PUT_CODE (reg, MEM);
1394 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1395 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1396
1397 /* The two parts are in memory order already.
1398 Use the lower part's address as ours. */
1399 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1400 /* Prevent sharing of rtl that might lose. */
1401 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1402 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1403 }
1404 else
1405 return;
1406
1407 if (current_function_check_memory_usage)
1408 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1409 XEXP (reg, 0), Pmode,
1410 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1411 TYPE_MODE (sizetype),
1412 GEN_INT (MEMORY_USE_RW),
1413 TYPE_MODE (integer_type_node));
1414 }
1415
1416 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1417 into the stack frame of FUNCTION (0 means the current function).
1418 DECL_MODE is the machine mode of the user-level data type.
1419 PROMOTED_MODE is the machine mode of the register.
1420 VOLATILE_P is nonzero if this is for a "volatile" decl.
1421 USED_P is nonzero if this reg might have already been used in an insn. */
1422
1423 static void
1424 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1425 original_regno, used_p, ht)
1426 struct function *function;
1427 rtx reg;
1428 tree type;
1429 enum machine_mode promoted_mode, decl_mode;
1430 int volatile_p;
1431 int original_regno;
1432 int used_p;
1433 struct hash_table *ht;
1434 {
1435 struct function *func = function ? function : cfun;
1436 rtx new = 0;
1437 int regno = original_regno;
1438
1439 if (regno == 0)
1440 regno = REGNO (reg);
1441
1442 if (regno < func->x_max_parm_reg)
1443 new = func->x_parm_reg_stack_loc[regno];
1444 if (new == 0)
1445 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1446
1447 PUT_CODE (reg, MEM);
1448 PUT_MODE (reg, decl_mode);
1449 XEXP (reg, 0) = XEXP (new, 0);
1450 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1451 MEM_VOLATILE_P (reg) = volatile_p;
1452
1453 /* If this is a memory ref that contains aggregate components,
1454 mark it as such for cse and loop optimize. If we are reusing a
1455 previously generated stack slot, then we need to copy the bit in
1456 case it was set for other reasons. For instance, it is set for
1457 __builtin_va_alist. */
1458 MEM_SET_IN_STRUCT_P (reg,
1459 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1460 MEM_ALIAS_SET (reg) = get_alias_set (type);
1461
1462 /* Now make sure that all refs to the variable, previously made
1463 when it was a register, are fixed up to be valid again. */
1464
1465 if (used_p && function != 0)
1466 {
1467 struct var_refs_queue *temp;
1468
1469 temp
1470 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1471 temp->modified = reg;
1472 temp->promoted_mode = promoted_mode;
1473 temp->unsignedp = TREE_UNSIGNED (type);
1474 temp->next = function->fixup_var_refs_queue;
1475 function->fixup_var_refs_queue = temp;
1476 }
1477 else if (used_p)
1478 /* Variable is local; fix it up now. */
1479 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1480 }
1481 \f
1482 static void
1483 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1484 rtx var;
1485 enum machine_mode promoted_mode;
1486 int unsignedp;
1487 struct hash_table *ht;
1488 {
1489 tree pending;
1490 rtx first_insn = get_insns ();
1491 struct sequence_stack *stack = seq_stack;
1492 tree rtl_exps = rtl_expr_chain;
1493
1494 /* Must scan all insns for stack-refs that exceed the limit. */
1495 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1496 stack == 0, ht);
1497 /* If there's a hash table, it must record all uses of VAR. */
1498 if (ht)
1499 return;
1500
1501 /* Scan all pending sequences too. */
1502 for (; stack; stack = stack->next)
1503 {
1504 push_to_sequence (stack->first);
1505 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1506 stack->first, stack->next != 0, 0);
1507 /* Update remembered end of sequence
1508 in case we added an insn at the end. */
1509 stack->last = get_last_insn ();
1510 end_sequence ();
1511 }
1512
1513 /* Scan all waiting RTL_EXPRs too. */
1514 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1515 {
1516 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1517 if (seq != const0_rtx && seq != 0)
1518 {
1519 push_to_sequence (seq);
1520 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1521 0);
1522 end_sequence ();
1523 }
1524 }
1525
1526 /* Scan the catch clauses for exception handling too. */
1527 push_to_sequence (catch_clauses);
1528 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1529 0, 0);
1530 end_sequence ();
1531 }
1532 \f
1533 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
1534 some part of an insn. Return a struct fixup_replacement whose OLD
1535 value is equal to X. Allocate a new structure if no such entry exists. */
1536
1537 static struct fixup_replacement *
1538 find_fixup_replacement (replacements, x)
1539 struct fixup_replacement **replacements;
1540 rtx x;
1541 {
1542 struct fixup_replacement *p;
1543
1544 /* See if we have already replaced this. */
1545 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1546 ;
1547
1548 if (p == 0)
1549 {
1550 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1551 p->old = x;
1552 p->new = 0;
1553 p->next = *replacements;
1554 *replacements = p;
1555 }
1556
1557 return p;
1558 }
1559
1560 /* Scan the insn-chain starting with INSN for refs to VAR
1561 and fix them up. TOPLEVEL is nonzero if this chain is the
1562 main chain of insns for the current function. */
1563
1564 static void
1565 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1566 rtx var;
1567 enum machine_mode promoted_mode;
1568 int unsignedp;
1569 rtx insn;
1570 int toplevel;
1571 struct hash_table *ht;
1572 {
1573 rtx call_dest = 0;
1574 rtx insn_list = NULL_RTX;
1575
1576 /* If we already know which INSNs reference VAR there's no need
1577 to walk the entire instruction chain. */
1578 if (ht)
1579 {
1580 insn_list = ((struct insns_for_mem_entry *)
1581 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1582 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1583 insn_list = XEXP (insn_list, 1);
1584 }
1585
1586 while (insn)
1587 {
1588 rtx next = NEXT_INSN (insn);
1589 rtx set, prev, prev_set;
1590 rtx note;
1591
1592 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1593 {
1594 /* Remember the notes in case we delete the insn. */
1595 note = REG_NOTES (insn);
1596
1597 /* If this is a CLOBBER of VAR, delete it.
1598
1599 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1600 and REG_RETVAL notes too. */
1601 if (GET_CODE (PATTERN (insn)) == CLOBBER
1602 && (XEXP (PATTERN (insn), 0) == var
1603 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1604 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1605 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1606 {
1607 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1608 /* The REG_LIBCALL note will go away since we are going to
1609 turn INSN into a NOTE, so just delete the
1610 corresponding REG_RETVAL note. */
1611 remove_note (XEXP (note, 0),
1612 find_reg_note (XEXP (note, 0), REG_RETVAL,
1613 NULL_RTX));
1614
1615 /* In unoptimized compilation, we shouldn't call delete_insn
1616 except in jump.c doing warnings. */
1617 PUT_CODE (insn, NOTE);
1618 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1619 NOTE_SOURCE_FILE (insn) = 0;
1620 }
1621
1622 /* The insn to load VAR from a home in the arglist
1623 is now a no-op. When we see it, just delete it.
1624 Similarly if this is storing VAR from a register from which
1625 it was loaded in the previous insn. This will occur
1626 when an ADDRESSOF was made for an arglist slot. */
1627 else if (toplevel
1628 && (set = single_set (insn)) != 0
1629 && SET_DEST (set) == var
1630 /* If this represents the result of an insn group,
1631 don't delete the insn. */
1632 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1633 && (rtx_equal_p (SET_SRC (set), var)
1634 || (GET_CODE (SET_SRC (set)) == REG
1635 && (prev = prev_nonnote_insn (insn)) != 0
1636 && (prev_set = single_set (prev)) != 0
1637 && SET_DEST (prev_set) == SET_SRC (set)
1638 && rtx_equal_p (SET_SRC (prev_set), var))))
1639 {
1640 /* In unoptimized compilation, we shouldn't call delete_insn
1641 except in jump.c doing warnings. */
1642 PUT_CODE (insn, NOTE);
1643 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1644 NOTE_SOURCE_FILE (insn) = 0;
1645 if (insn == last_parm_insn)
1646 last_parm_insn = PREV_INSN (next);
1647 }
1648 else
1649 {
1650 struct fixup_replacement *replacements = 0;
1651 rtx next_insn = NEXT_INSN (insn);
1652
1653 if (SMALL_REGISTER_CLASSES)
1654 {
1655 /* If the insn that copies the results of a CALL_INSN
1656 into a pseudo now references VAR, we have to use an
1657 intermediate pseudo since we want the life of the
1658 return value register to be only a single insn.
1659
1660 If we don't use an intermediate pseudo, such things as
1661 address computations to make the address of VAR valid
1662 if it is not can be placed between the CALL_INSN and INSN.
1663
1664 To make sure this doesn't happen, we record the destination
1665 of the CALL_INSN and see if the next insn uses both that
1666 and VAR. */
1667
1668 if (call_dest != 0 && GET_CODE (insn) == INSN
1669 && reg_mentioned_p (var, PATTERN (insn))
1670 && reg_mentioned_p (call_dest, PATTERN (insn)))
1671 {
1672 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1673
1674 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1675
1676 PATTERN (insn) = replace_rtx (PATTERN (insn),
1677 call_dest, temp);
1678 }
1679
1680 if (GET_CODE (insn) == CALL_INSN
1681 && GET_CODE (PATTERN (insn)) == SET)
1682 call_dest = SET_DEST (PATTERN (insn));
1683 else if (GET_CODE (insn) == CALL_INSN
1684 && GET_CODE (PATTERN (insn)) == PARALLEL
1685 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1686 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1687 else
1688 call_dest = 0;
1689 }
1690
1691 /* See if we have to do anything to INSN now that VAR is in
1692 memory. If it needs to be loaded into a pseudo, use a single
1693 pseudo for the entire insn in case there is a MATCH_DUP
1694 between two operands. We pass a pointer to the head of
1695 a list of struct fixup_replacements. If fixup_var_refs_1
1696 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1697 it will record them in this list.
1698
1699 If it allocated a pseudo for any replacement, we copy into
1700 it here. */
1701
1702 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1703 &replacements);
1704
1705 /* If this is last_parm_insn, and any instructions were output
1706 after it to fix it up, then we must set last_parm_insn to
1707 the last such instruction emitted. */
1708 if (insn == last_parm_insn)
1709 last_parm_insn = PREV_INSN (next_insn);
1710
1711 while (replacements)
1712 {
1713 if (GET_CODE (replacements->new) == REG)
1714 {
1715 rtx insert_before;
1716 rtx seq;
1717
1718 /* OLD might be a (subreg (mem)). */
1719 if (GET_CODE (replacements->old) == SUBREG)
1720 replacements->old
1721 = fixup_memory_subreg (replacements->old, insn, 0);
1722 else
1723 replacements->old
1724 = fixup_stack_1 (replacements->old, insn);
1725
1726 insert_before = insn;
1727
1728 /* If we are changing the mode, do a conversion.
1729 This might be wasteful, but combine.c will
1730 eliminate much of the waste. */
1731
1732 if (GET_MODE (replacements->new)
1733 != GET_MODE (replacements->old))
1734 {
1735 start_sequence ();
1736 convert_move (replacements->new,
1737 replacements->old, unsignedp);
1738 seq = gen_sequence ();
1739 end_sequence ();
1740 }
1741 else
1742 seq = gen_move_insn (replacements->new,
1743 replacements->old);
1744
1745 emit_insn_before (seq, insert_before);
1746 }
1747
1748 replacements = replacements->next;
1749 }
1750 }
1751
1752 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1753 But don't touch other insns referred to by reg-notes;
1754 we will get them elsewhere. */
1755 while (note)
1756 {
1757 if (GET_CODE (note) != INSN_LIST)
1758 XEXP (note, 0)
1759 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1760 note = XEXP (note, 1);
1761 }
1762 }
1763
1764 if (!ht)
1765 insn = next;
1766 else if (insn_list)
1767 {
1768 insn = XEXP (insn_list, 0);
1769 insn_list = XEXP (insn_list, 1);
1770 }
1771 else
1772 insn = NULL_RTX;
1773 }
1774 }
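
/* Illustrative sketch (hypothetical register numbers and frame offset) of
   the redundant store handled by the `toplevel' deletion case above.  Once
   VAR has become a stack slot, say (mem:SI (plus fp -8)), the pair

       (set (reg:SI 101) (mem:SI (plus fp -8)))    ;; load VAR
       (set (mem:SI (plus fp -8)) (reg:SI 101))    ;; store VAR back

   leaves VAR unchanged, so the second insn is turned into a
   NOTE_INSN_DELETED instead of being fixed up.  */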
1775 \f
1776 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1777 See if the rtx expression at *LOC in INSN needs to be changed.
1778
1779 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1780 contain a list of original rtx's and replacements. If we find that we need
1781 to modify this insn by replacing a memory reference with a pseudo or by
1782 making a new MEM to implement a SUBREG, we consult that list to see if
1783 we have already chosen a replacement. If none has already been allocated,
1784 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1785 or the SUBREG, as appropriate, to the pseudo. */
1786
1787 static void
1788 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1789 register rtx var;
1790 enum machine_mode promoted_mode;
1791 register rtx *loc;
1792 rtx insn;
1793 struct fixup_replacement **replacements;
1794 {
1795 register int i;
1796 register rtx x = *loc;
1797 RTX_CODE code = GET_CODE (x);
1798 register const char *fmt;
1799 register rtx tem, tem1;
1800 struct fixup_replacement *replacement;
1801
1802 switch (code)
1803 {
1804 case ADDRESSOF:
1805 if (XEXP (x, 0) == var)
1806 {
1807 /* Prevent sharing of rtl that might lose. */
1808 rtx sub = copy_rtx (XEXP (var, 0));
1809
1810 if (! validate_change (insn, loc, sub, 0))
1811 {
1812 rtx y = gen_reg_rtx (GET_MODE (sub));
1813 rtx seq, new_insn;
1814
1815 /* We should be able to replace with a register or all is lost.
1816 Note that we can't use validate_change to verify this, since
1817                  we are not trying to replace all dups simultaneously. */
1818 if (! validate_replace_rtx (*loc, y, insn))
1819 abort ();
1820
1821 /* Careful! First try to recognize a direct move of the
1822 value, mimicking how things are done in gen_reload wrt
1823 PLUS. Consider what happens when insn is a conditional
1824 move instruction and addsi3 clobbers flags. */
1825
1826 start_sequence ();
1827 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1828 seq = gen_sequence ();
1829 end_sequence ();
1830
1831 if (recog_memoized (new_insn) < 0)
1832 {
1833 /* That failed. Fall back on force_operand and hope. */
1834
1835 start_sequence ();
1836 force_operand (sub, y);
1837 seq = gen_sequence ();
1838 end_sequence ();
1839 }
1840
1841 #ifdef HAVE_cc0
1842 /* Don't separate setter from user. */
1843 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1844 insn = PREV_INSN (insn);
1845 #endif
1846
1847 emit_insn_before (seq, insn);
1848 }
1849 }
1850 return;
1851
1852 case MEM:
1853 if (var == x)
1854 {
1855 /* If we already have a replacement, use it. Otherwise,
1856 try to fix up this address in case it is invalid. */
1857
1858 replacement = find_fixup_replacement (replacements, var);
1859 if (replacement->new)
1860 {
1861 *loc = replacement->new;
1862 return;
1863 }
1864
1865 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1866
1867 /* Unless we are forcing memory to register or we changed the mode,
1868 we can leave things the way they are if the insn is valid. */
1869
1870 INSN_CODE (insn) = -1;
1871 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1872 && recog_memoized (insn) >= 0)
1873 return;
1874
1875 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1876 return;
1877 }
1878
1879 /* If X contains VAR, we need to unshare it here so that we update
1880 each occurrence separately. But all identical MEMs in one insn
1881 must be replaced with the same rtx because of the possibility of
1882 MATCH_DUPs. */
1883
1884 if (reg_mentioned_p (var, x))
1885 {
1886 replacement = find_fixup_replacement (replacements, x);
1887 if (replacement->new == 0)
1888 replacement->new = copy_most_rtx (x, var);
1889
1890 *loc = x = replacement->new;
1891 }
1892 break;
1893
1894 case REG:
1895 case CC0:
1896 case PC:
1897 case CONST_INT:
1898 case CONST:
1899 case SYMBOL_REF:
1900 case LABEL_REF:
1901 case CONST_DOUBLE:
1902 return;
1903
1904 case SIGN_EXTRACT:
1905 case ZERO_EXTRACT:
1906 /* Note that in some cases those types of expressions are altered
1907 by optimize_bit_field, and do not survive to get here. */
1908 if (XEXP (x, 0) == var
1909 || (GET_CODE (XEXP (x, 0)) == SUBREG
1910 && SUBREG_REG (XEXP (x, 0)) == var))
1911 {
1912 /* Get TEM as a valid MEM in the mode presently in the insn.
1913
1914 We don't worry about the possibility of MATCH_DUP here; it
1915 is highly unlikely and would be tricky to handle. */
1916
1917 tem = XEXP (x, 0);
1918 if (GET_CODE (tem) == SUBREG)
1919 {
1920 if (GET_MODE_BITSIZE (GET_MODE (tem))
1921 > GET_MODE_BITSIZE (GET_MODE (var)))
1922 {
1923 replacement = find_fixup_replacement (replacements, var);
1924 if (replacement->new == 0)
1925 replacement->new = gen_reg_rtx (GET_MODE (var));
1926 SUBREG_REG (tem) = replacement->new;
1927 }
1928 else
1929 tem = fixup_memory_subreg (tem, insn, 0);
1930 }
1931 else
1932 tem = fixup_stack_1 (tem, insn);
1933
1934 /* Unless we want to load from memory, get TEM into the proper mode
1935 for an extract from memory. This can only be done if the
1936 extract is at a constant position and length. */
1937
1938 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1939 && GET_CODE (XEXP (x, 2)) == CONST_INT
1940 && ! mode_dependent_address_p (XEXP (tem, 0))
1941 && ! MEM_VOLATILE_P (tem))
1942 {
1943 enum machine_mode wanted_mode = VOIDmode;
1944 enum machine_mode is_mode = GET_MODE (tem);
1945 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1946
1947 #ifdef HAVE_extzv
1948 if (GET_CODE (x) == ZERO_EXTRACT)
1949 {
1950 wanted_mode
1951 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1952 if (wanted_mode == VOIDmode)
1953 wanted_mode = word_mode;
1954 }
1955 #endif
1956 #ifdef HAVE_extv
1957 if (GET_CODE (x) == SIGN_EXTRACT)
1958 {
1959 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1960 if (wanted_mode == VOIDmode)
1961 wanted_mode = word_mode;
1962 }
1963 #endif
1964 /* If we have a narrower mode, we can do something. */
1965 if (wanted_mode != VOIDmode
1966 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1967 {
1968 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1969 rtx old_pos = XEXP (x, 2);
1970 rtx newmem;
1971
1972 /* If the bytes and bits are counted differently, we
1973 must adjust the offset. */
1974 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1975 offset = (GET_MODE_SIZE (is_mode)
1976 - GET_MODE_SIZE (wanted_mode) - offset);
1977
1978 pos %= GET_MODE_BITSIZE (wanted_mode);
1979
1980 newmem = gen_rtx_MEM (wanted_mode,
1981 plus_constant (XEXP (tem, 0), offset));
1982 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1983 MEM_COPY_ATTRIBUTES (newmem, tem);
1984
1985 /* Make the change and see if the insn remains valid. */
1986 INSN_CODE (insn) = -1;
1987 XEXP (x, 0) = newmem;
1988 XEXP (x, 2) = GEN_INT (pos);
1989
1990 if (recog_memoized (insn) >= 0)
1991 return;
1992
1993 /* Otherwise, restore old position. XEXP (x, 0) will be
1994 restored later. */
1995 XEXP (x, 2) = old_pos;
1996 }
1997 }
1998
1999 /* If we get here, the bitfield extract insn can't accept a memory
2000 reference. Copy the input into a register. */
2001
2002 tem1 = gen_reg_rtx (GET_MODE (tem));
2003 emit_insn_before (gen_move_insn (tem1, tem), insn);
2004 XEXP (x, 0) = tem1;
2005 return;
2006 }
2007 break;
2008
2009 case SUBREG:
2010 if (SUBREG_REG (x) == var)
2011 {
2012 /* If this is a special SUBREG made because VAR was promoted
2013 from a wider mode, replace it with VAR and call ourself
2014 recursively, this time saying that the object previously
2015 had its current mode (by virtue of the SUBREG). */
2016
2017 if (SUBREG_PROMOTED_VAR_P (x))
2018 {
2019 *loc = var;
2020 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2021 return;
2022 }
2023
2024 /* If this SUBREG makes VAR wider, it has become a paradoxical
2025 SUBREG with VAR in memory, but these aren't allowed at this
2026 stage of the compilation. So load VAR into a pseudo and take
2027 a SUBREG of that pseudo. */
2028 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2029 {
2030 replacement = find_fixup_replacement (replacements, var);
2031 if (replacement->new == 0)
2032 replacement->new = gen_reg_rtx (GET_MODE (var));
2033 SUBREG_REG (x) = replacement->new;
2034 return;
2035 }
2036
2037 /* See if we have already found a replacement for this SUBREG.
2038 If so, use it. Otherwise, make a MEM and see if the insn
2039 is recognized. If not, or if we should force MEM into a register,
2040 make a pseudo for this SUBREG. */
2041 replacement = find_fixup_replacement (replacements, x);
2042 if (replacement->new)
2043 {
2044 *loc = replacement->new;
2045 return;
2046 }
2047
2048 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2049
2050 INSN_CODE (insn) = -1;
2051 if (! flag_force_mem && recog_memoized (insn) >= 0)
2052 return;
2053
2054 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2055 return;
2056 }
2057 break;
2058
2059 case SET:
2060 /* First do special simplification of bit-field references. */
2061 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2062 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2063 optimize_bit_field (x, insn, 0);
2064 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2065 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2066 optimize_bit_field (x, insn, NULL_PTR);
2067
2068 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2069 into a register and then store it back out. */
2070 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2071 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2072 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2073 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2074 > GET_MODE_SIZE (GET_MODE (var))))
2075 {
2076 replacement = find_fixup_replacement (replacements, var);
2077 if (replacement->new == 0)
2078 replacement->new = gen_reg_rtx (GET_MODE (var));
2079
2080 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2081 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2082 }
2083
2084 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2085 insn into a pseudo and store the low part of the pseudo into VAR. */
2086 if (GET_CODE (SET_DEST (x)) == SUBREG
2087 && SUBREG_REG (SET_DEST (x)) == var
2088 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2089 > GET_MODE_SIZE (GET_MODE (var))))
2090 {
2091 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2092 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2093 tem)),
2094 insn);
2095 break;
2096 }
2097
2098 {
2099 rtx dest = SET_DEST (x);
2100 rtx src = SET_SRC (x);
2101 #ifdef HAVE_insv
2102 rtx outerdest = dest;
2103 #endif
2104
2105 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2106 || GET_CODE (dest) == SIGN_EXTRACT
2107 || GET_CODE (dest) == ZERO_EXTRACT)
2108 dest = XEXP (dest, 0);
2109
2110 if (GET_CODE (src) == SUBREG)
2111 src = XEXP (src, 0);
2112
2113 /* If VAR does not appear at the top level of the SET
2114 just scan the lower levels of the tree. */
2115
2116 if (src != var && dest != var)
2117 break;
2118
2119 /* We will need to rerecognize this insn. */
2120 INSN_CODE (insn) = -1;
2121
2122 #ifdef HAVE_insv
2123 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2124 {
2125 /* Since this case will return, ensure we fixup all the
2126 operands here. */
2127 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2128 insn, replacements);
2129 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2130 insn, replacements);
2131 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2132 insn, replacements);
2133
2134 tem = XEXP (outerdest, 0);
2135
2136 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2137 that may appear inside a ZERO_EXTRACT.
2138 This was legitimate when the MEM was a REG. */
2139 if (GET_CODE (tem) == SUBREG
2140 && SUBREG_REG (tem) == var)
2141 tem = fixup_memory_subreg (tem, insn, 0);
2142 else
2143 tem = fixup_stack_1 (tem, insn);
2144
2145 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2146 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2147 && ! mode_dependent_address_p (XEXP (tem, 0))
2148 && ! MEM_VOLATILE_P (tem))
2149 {
2150 enum machine_mode wanted_mode;
2151 enum machine_mode is_mode = GET_MODE (tem);
2152 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2153
2154 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2155 if (wanted_mode == VOIDmode)
2156 wanted_mode = word_mode;
2157
2158 /* If we have a narrower mode, we can do something. */
2159 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2160 {
2161 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2162 rtx old_pos = XEXP (outerdest, 2);
2163 rtx newmem;
2164
2165 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2166 offset = (GET_MODE_SIZE (is_mode)
2167 - GET_MODE_SIZE (wanted_mode) - offset);
2168
2169 pos %= GET_MODE_BITSIZE (wanted_mode);
2170
2171 newmem = gen_rtx_MEM (wanted_mode,
2172 plus_constant (XEXP (tem, 0),
2173 offset));
2174 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2175 MEM_COPY_ATTRIBUTES (newmem, tem);
2176
2177 /* Make the change and see if the insn remains valid. */
2178 INSN_CODE (insn) = -1;
2179 XEXP (outerdest, 0) = newmem;
2180 XEXP (outerdest, 2) = GEN_INT (pos);
2181
2182 if (recog_memoized (insn) >= 0)
2183 return;
2184
2185 /* Otherwise, restore old position. XEXP (x, 0) will be
2186 restored later. */
2187 XEXP (outerdest, 2) = old_pos;
2188 }
2189 }
2190
2191 /* If we get here, the bit-field store doesn't allow memory
2192 or isn't located at a constant position. Load the value into
2193 a register, do the store, and put it back into memory. */
2194
2195 tem1 = gen_reg_rtx (GET_MODE (tem));
2196 emit_insn_before (gen_move_insn (tem1, tem), insn);
2197 emit_insn_after (gen_move_insn (tem, tem1), insn);
2198 XEXP (outerdest, 0) = tem1;
2199 return;
2200 }
2201 #endif
2202
2203 /* STRICT_LOW_PART is a no-op on memory references
2204 and it can cause combinations to be unrecognizable,
2205 so eliminate it. */
2206
2207 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2208 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2209
2210 /* A valid insn to copy VAR into or out of a register
2211 must be left alone, to avoid an infinite loop here.
2212 If the reference to VAR is by a subreg, fix that up,
2213 since SUBREG is not valid for a memref.
2214 Also fix up the address of the stack slot.
2215
2216 Note that we must not try to recognize the insn until
2217 after we know that we have valid addresses and no
2218 (subreg (mem ...) ...) constructs, since these interfere
2219 with determining the validity of the insn. */
2220
2221 if ((SET_SRC (x) == var
2222 || (GET_CODE (SET_SRC (x)) == SUBREG
2223 && SUBREG_REG (SET_SRC (x)) == var))
2224 && (GET_CODE (SET_DEST (x)) == REG
2225 || (GET_CODE (SET_DEST (x)) == SUBREG
2226 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2227 && GET_MODE (var) == promoted_mode
2228 && x == single_set (insn))
2229 {
2230 rtx pat;
2231
2232 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2233 if (replacement->new)
2234 SET_SRC (x) = replacement->new;
2235 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2236 SET_SRC (x) = replacement->new
2237 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2238 else
2239 SET_SRC (x) = replacement->new
2240 = fixup_stack_1 (SET_SRC (x), insn);
2241
2242 if (recog_memoized (insn) >= 0)
2243 return;
2244
2245 /* INSN is not valid, but we know that we want to
2246 copy SET_SRC (x) to SET_DEST (x) in some way. So
2247 we generate the move and see whether it requires more
2248 than one insn. If it does, we emit those insns and
2249              delete INSN. Otherwise, we can just replace the pattern
2250              of INSN; we have already verified above that INSN has
2251              no other function than to do X. */
2252
2253 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2254 if (GET_CODE (pat) == SEQUENCE)
2255 {
2256 emit_insn_after (pat, insn);
2257 PUT_CODE (insn, NOTE);
2258 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2259 NOTE_SOURCE_FILE (insn) = 0;
2260 }
2261 else
2262 PATTERN (insn) = pat;
2263
2264 return;
2265 }
2266
2267 if ((SET_DEST (x) == var
2268 || (GET_CODE (SET_DEST (x)) == SUBREG
2269 && SUBREG_REG (SET_DEST (x)) == var))
2270 && (GET_CODE (SET_SRC (x)) == REG
2271 || (GET_CODE (SET_SRC (x)) == SUBREG
2272 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2273 && GET_MODE (var) == promoted_mode
2274 && x == single_set (insn))
2275 {
2276 rtx pat;
2277
2278 if (GET_CODE (SET_DEST (x)) == SUBREG)
2279 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2280 else
2281 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2282
2283 if (recog_memoized (insn) >= 0)
2284 return;
2285
2286 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2287 if (GET_CODE (pat) == SEQUENCE)
2288 {
2289 emit_insn_after (pat, insn);
2290 PUT_CODE (insn, NOTE);
2291 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2292 NOTE_SOURCE_FILE (insn) = 0;
2293 }
2294 else
2295 PATTERN (insn) = pat;
2296
2297 return;
2298 }
2299
2300 /* Otherwise, storing into VAR must be handled specially
2301 by storing into a temporary and copying that into VAR
2302 with a new insn after this one. Note that this case
2303 will be used when storing into a promoted scalar since
2304 the insn will now have different modes on the input
2305 and output and hence will be invalid (except for the case
2306 of setting it to a constant, which does not need any
2307 change if it is valid). We generate extra code in that case,
2308 but combine.c will eliminate it. */
2309
2310 if (dest == var)
2311 {
2312 rtx temp;
2313 rtx fixeddest = SET_DEST (x);
2314
2315            /* STRICT_LOW_PART around a MEM can be discarded. */
2316 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2317 fixeddest = XEXP (fixeddest, 0);
2318 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2319 if (GET_CODE (fixeddest) == SUBREG)
2320 {
2321 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2322 promoted_mode = GET_MODE (fixeddest);
2323 }
2324 else
2325 fixeddest = fixup_stack_1 (fixeddest, insn);
2326
2327 temp = gen_reg_rtx (promoted_mode);
2328
2329 emit_insn_after (gen_move_insn (fixeddest,
2330 gen_lowpart (GET_MODE (fixeddest),
2331 temp)),
2332 insn);
2333
2334 SET_DEST (x) = temp;
2335 }
2336 }
2337
2338 default:
2339 break;
2340 }
2341
2342 /* Nothing special about this RTX; fix its operands. */
2343
2344 fmt = GET_RTX_FORMAT (code);
2345 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2346 {
2347 if (fmt[i] == 'e')
2348 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2349 else if (fmt[i] == 'E')
2350 {
2351 register int j;
2352 for (j = 0; j < XVECLEN (x, i); j++)
2353 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2354 insn, replacements);
2355 }
2356 }
2357 }
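
/* Illustrative sketch (hypothetical register numbers and offset) of a
   replacement recorded by the MEM case above.  Suppose VAR became the
   stack slot (mem:SI (plus fp -4)) and the insn

       (set (reg:SI 110) (plus:SI (mem:SI (plus fp -4)) (const_int 1)))

   is no longer recognized with a memory operand.  A fresh pseudo, say
   (reg:SI 111), is allocated and put on REPLACEMENTS; fixup_var_refs_insns
   then emits (set (reg:SI 111) (mem:SI (plus fp -4))) before the insn,
   which now uses the pseudo in place of the MEM.  */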
2358 \f
2359 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2360 return an rtx (MEM:m1 newaddr) which is equivalent.
2361 If any insns must be emitted to compute NEWADDR, put them before INSN.
2362
2363 UNCRITICAL nonzero means accept paradoxical subregs.
2364 This is used for subregs found inside REG_NOTES. */
2365
2366 static rtx
2367 fixup_memory_subreg (x, insn, uncritical)
2368 rtx x;
2369 rtx insn;
2370 int uncritical;
2371 {
2372 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2373 rtx addr = XEXP (SUBREG_REG (x), 0);
2374 enum machine_mode mode = GET_MODE (x);
2375 rtx result;
2376
2377 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2378 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2379 && ! uncritical)
2380 abort ();
2381
2382 if (BYTES_BIG_ENDIAN)
2383 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2384 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2385 addr = plus_constant (addr, offset);
2386 if (!flag_force_addr && memory_address_p (mode, addr))
2387 /* Shortcut if no insns need be emitted. */
2388 return change_address (SUBREG_REG (x), mode, addr);
2389 start_sequence ();
2390 result = change_address (SUBREG_REG (x), mode, addr);
2391 emit_insn_before (gen_sequence (), insn);
2392 end_sequence ();
2393 return result;
2394 }
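
/* Illustrative example (hypothetical target parameters): on a big-endian
   target with 4-byte words, (subreg:HI (mem:SI addr) 0) gives

       offset = 0 * UNITS_PER_WORD + (MIN (4, 4) - MIN (4, 2)) = 2

   so the result is (mem:HI (plus addr 2)), the halfword holding the
   low-order bits of the SImode value; on a little-endian target the
   offset stays 0.  */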
2395
2396 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2397 Replace subexpressions of X in place.
2398 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2399 Otherwise return X, with its contents possibly altered.
2400
2401 If any insns must be emitted to compute NEWADDR, put them before INSN.
2402
2403 UNCRITICAL is as in fixup_memory_subreg. */
2404
2405 static rtx
2406 walk_fixup_memory_subreg (x, insn, uncritical)
2407 register rtx x;
2408 rtx insn;
2409 int uncritical;
2410 {
2411 register enum rtx_code code;
2412 register const char *fmt;
2413 register int i;
2414
2415 if (x == 0)
2416 return 0;
2417
2418 code = GET_CODE (x);
2419
2420 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2421 return fixup_memory_subreg (x, insn, uncritical);
2422
2423 /* Nothing special about this RTX; fix its operands. */
2424
2425 fmt = GET_RTX_FORMAT (code);
2426 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2427 {
2428 if (fmt[i] == 'e')
2429 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2430 else if (fmt[i] == 'E')
2431 {
2432 register int j;
2433 for (j = 0; j < XVECLEN (x, i); j++)
2434 XVECEXP (x, i, j)
2435 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2436 }
2437 }
2438 return x;
2439 }
2440 \f
2441 /* For each memory ref within X, if it refers to a stack slot
2442 with an out of range displacement, put the address in a temp register
2443 (emitting new insns before INSN to load these registers)
2444 and alter the memory ref to use that register.
2445 Replace each such MEM rtx with a copy, to avoid clobberage. */
2446
2447 static rtx
2448 fixup_stack_1 (x, insn)
2449 rtx x;
2450 rtx insn;
2451 {
2452 register int i;
2453 register RTX_CODE code = GET_CODE (x);
2454 register const char *fmt;
2455
2456 if (code == MEM)
2457 {
2458 register rtx ad = XEXP (x, 0);
2459 /* If we have address of a stack slot but it's not valid
2460 (displacement is too large), compute the sum in a register. */
2461 if (GET_CODE (ad) == PLUS
2462 && GET_CODE (XEXP (ad, 0)) == REG
2463 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2464 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2465 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2466 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2467 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2468 #endif
2469 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2470 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2471 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2472 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2473 {
2474 rtx temp, seq;
2475 if (memory_address_p (GET_MODE (x), ad))
2476 return x;
2477
2478 start_sequence ();
2479 temp = copy_to_reg (ad);
2480 seq = gen_sequence ();
2481 end_sequence ();
2482 emit_insn_before (seq, insn);
2483 return change_address (x, VOIDmode, temp);
2484 }
2485 return x;
2486 }
2487
2488 fmt = GET_RTX_FORMAT (code);
2489 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2490 {
2491 if (fmt[i] == 'e')
2492 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2493 else if (fmt[i] == 'E')
2494 {
2495 register int j;
2496 for (j = 0; j < XVECLEN (x, i); j++)
2497 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2498 }
2499 }
2500 return x;
2501 }
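
/* Illustrative example (hypothetical pseudo number and displacement):
   with a large frame, a reference such as

       (mem:SI (plus (reg fp) (const_int 40000)))

   may exceed the target's displacement range.  In that case the code
   above emits (set (reg:Pmode 120) (plus (reg fp) (const_int 40000)))
   before INSN and rewrites the reference as (mem:SI (reg:Pmode 120)).  */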
2502 \f
2503 /* Optimization: a bit-field instruction whose field
2504 happens to be a byte or halfword in memory
2505 can be changed to a move instruction.
2506
2507 We call here when INSN is an insn to examine or store into a bit-field.
2508 BODY is the SET-rtx to be altered.
2509
2510 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2511 (Currently this is called only from function.c, and EQUIV_MEM
2512 is always 0.) */
2513
2514 static void
2515 optimize_bit_field (body, insn, equiv_mem)
2516 rtx body;
2517 rtx insn;
2518 rtx *equiv_mem;
2519 {
2520 register rtx bitfield;
2521 int destflag;
2522 rtx seq = 0;
2523 enum machine_mode mode;
2524
2525 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2526 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2527 bitfield = SET_DEST (body), destflag = 1;
2528 else
2529 bitfield = SET_SRC (body), destflag = 0;
2530
2531 /* First check that the field being stored has constant size and position
2532 and is in fact a byte or halfword suitably aligned. */
2533
2534 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2535 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2536 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2537 != BLKmode)
2538 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2539 {
2540 register rtx memref = 0;
2541
2542 /* Now check that the containing word is memory, not a register,
2543 and that it is safe to change the machine mode. */
2544
2545 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2546 memref = XEXP (bitfield, 0);
2547 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2548 && equiv_mem != 0)
2549 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2550 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2551 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2552 memref = SUBREG_REG (XEXP (bitfield, 0));
2553 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2554 && equiv_mem != 0
2555 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2556 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2557
2558 if (memref
2559 && ! mode_dependent_address_p (XEXP (memref, 0))
2560 && ! MEM_VOLATILE_P (memref))
2561 {
2562 /* Now adjust the address, first for any subreg'ing
2563 that we are now getting rid of,
2564 and then for which byte of the word is wanted. */
2565
2566 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2567 rtx insns;
2568
2569 /* Adjust OFFSET to count bits from low-address byte. */
2570 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2571 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2572 - offset - INTVAL (XEXP (bitfield, 1)));
2573
2574 /* Adjust OFFSET to count bytes from low-address byte. */
2575 offset /= BITS_PER_UNIT;
2576 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2577 {
2578 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2579 if (BYTES_BIG_ENDIAN)
2580 offset -= (MIN (UNITS_PER_WORD,
2581 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2582 - MIN (UNITS_PER_WORD,
2583 GET_MODE_SIZE (GET_MODE (memref))));
2584 }
2585
2586 start_sequence ();
2587 memref = change_address (memref, mode,
2588 plus_constant (XEXP (memref, 0), offset));
2589 insns = get_insns ();
2590 end_sequence ();
2591 emit_insns_before (insns, insn);
2592
2593 /* Store this memory reference where
2594 we found the bit field reference. */
2595
2596 if (destflag)
2597 {
2598 validate_change (insn, &SET_DEST (body), memref, 1);
2599 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2600 {
2601 rtx src = SET_SRC (body);
2602 while (GET_CODE (src) == SUBREG
2603 && SUBREG_WORD (src) == 0)
2604 src = SUBREG_REG (src);
2605 if (GET_MODE (src) != GET_MODE (memref))
2606 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2607 validate_change (insn, &SET_SRC (body), src, 1);
2608 }
2609 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2610 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2611 /* This shouldn't happen because anything that didn't have
2612 one of these modes should have got converted explicitly
2613 and then referenced through a subreg.
2614 This is so because the original bit-field was
2615 handled by agg_mode and so its tree structure had
2616 the same mode that memref now has. */
2617 abort ();
2618 }
2619 else
2620 {
2621 rtx dest = SET_DEST (body);
2622
2623 while (GET_CODE (dest) == SUBREG
2624 && SUBREG_WORD (dest) == 0
2625 && (GET_MODE_CLASS (GET_MODE (dest))
2626 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2627 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2628 <= UNITS_PER_WORD))
2629 dest = SUBREG_REG (dest);
2630
2631 validate_change (insn, &SET_DEST (body), dest, 1);
2632
2633 if (GET_MODE (dest) == GET_MODE (memref))
2634 validate_change (insn, &SET_SRC (body), memref, 1);
2635 else
2636 {
2637 /* Convert the mem ref to the destination mode. */
2638 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2639
2640 start_sequence ();
2641 convert_move (newreg, memref,
2642 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2643 seq = get_insns ();
2644 end_sequence ();
2645
2646 validate_change (insn, &SET_SRC (body), newreg, 1);
2647 }
2648 }
2649
2650 /* See if we can convert this extraction or insertion into
2651 a simple move insn. We might not be able to do so if this
2652 was, for example, part of a PARALLEL.
2653
2654 If we succeed, write out any needed conversions. If we fail,
2655 it is hard to guess why we failed, so don't do anything
2656 special; just let the optimization be suppressed. */
2657
2658 if (apply_change_group () && seq)
2659 emit_insns_before (seq, insn);
2660 }
2661 }
2662 }
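
/* Illustrative example (assuming a little-endian target): the field
   reference

       (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))

   names a whole byte (8 bits wide, at bit position 8, and 8 % 8 == 0),
   so the extraction is rewritten above as a plain move from
   (mem:QI (plus addr 1)).  */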
2663 \f
2664 /* These routines are responsible for converting virtual register references
2665 to the actual hard register references once RTL generation is complete.
2666
2667 The following four variables are used for communication between the
2668 routines. They contain the offsets of the virtual registers from their
2669 respective hard registers. */
2670
2671 static int in_arg_offset;
2672 static int var_offset;
2673 static int dynamic_offset;
2674 static int out_arg_offset;
2675 static int cfa_offset;
2676
2677 /* In most machines, the stack pointer register is equivalent to the bottom
2678 of the stack. */
2679
2680 #ifndef STACK_POINTER_OFFSET
2681 #define STACK_POINTER_OFFSET 0
2682 #endif
2683
2684 /* If not defined, pick an appropriate default for the offset of dynamically
2685 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2686 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2687
2688 #ifndef STACK_DYNAMIC_OFFSET
2689
2690 #ifdef ACCUMULATE_OUTGOING_ARGS
2691 /* The bottom of the stack points to the actual arguments. If
2692 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2693    parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2694 stack space for register parameters is not pushed by the caller, but
2695 rather part of the fixed stack areas and hence not included in
2696 `current_function_outgoing_args_size'. Nevertheless, we must allow
2697 for it when allocating stack dynamic objects. */
2698
2699 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2700 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2701 (current_function_outgoing_args_size \
2702 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2703
2704 #else
2705 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2706 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2707 #endif
2708
2709 #else
2710 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2711 #endif
2712 #endif
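
/* Illustrative example (hypothetical values): with ACCUMULATE_OUTGOING_ARGS
   and REG_PARM_STACK_SPACE defined but OUTGOING_REG_PARM_STACK_SPACE not
   defined, 32 bytes of outgoing arguments, REG_PARM_STACK_SPACE (FNDECL)
   == 16 and STACK_POINTER_OFFSET == 0, dynamic allocations start at
   STACK_DYNAMIC_OFFSET == 32 + 16 + 0 == 48 bytes from the stack
   pointer.  */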
2713
2714 /* On a few machines, the CFA coincides with the arg pointer. */
2715
2716 #ifndef ARG_POINTER_CFA_OFFSET
2717 #define ARG_POINTER_CFA_OFFSET 0
2718 #endif
2719
2720
2721 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2722 its address taken. DECL is the decl for the object stored in the
2723 register, for later use if we do need to force REG into the stack.
2724 REG is overwritten by the MEM like in put_reg_into_stack. */
2725
2726 rtx
2727 gen_mem_addressof (reg, decl)
2728 rtx reg;
2729 tree decl;
2730 {
2731 tree type = TREE_TYPE (decl);
2732 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2733 REGNO (reg), decl);
2734
2735 /* If the original REG was a user-variable, then so is the REG whose
2736 address is being taken. Likewise for unchanging. */
2737 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2738 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2739
2740 PUT_CODE (reg, MEM);
2741 PUT_MODE (reg, DECL_MODE (decl));
2742 XEXP (reg, 0) = r;
2743 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2744 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2745 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2746
2747 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2748 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2749
2750 return reg;
2751 }
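
/* Illustrative sketch (hypothetical register numbers): if DECL's value
   lived in (reg:SI 104) and its address is taken, the function above
   rewrites that rtx in place as

       (mem:SI (addressof:Pmode (reg:SI 105) 104 decl))

   where (reg:SI 105) is the fresh pseudo that would hold the address.
   The MEM keeps DECL's mode, volatility and alias set as set just
   above.  */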
2752
2753 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2754
2755 #if 0
2756 void
2757 flush_addressof (decl)
2758 tree decl;
2759 {
2760 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2761 && DECL_RTL (decl) != 0
2762 && GET_CODE (DECL_RTL (decl)) == MEM
2763 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2764 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2765 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2766 }
2767 #endif
2768
2769 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2770
2771 static void
2772 put_addressof_into_stack (r, ht)
2773 rtx r;
2774 struct hash_table *ht;
2775 {
2776 tree decl = ADDRESSOF_DECL (r);
2777 rtx reg = XEXP (r, 0);
2778
2779 if (GET_CODE (reg) != REG)
2780 abort ();
2781
2782 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2783 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2784 ADDRESSOF_REGNO (r),
2785 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2786 }
2787
2788 /* List of replacements made below in purge_addressof_1 when creating
2789 bitfield insertions. */
2790 static rtx purge_bitfield_addressof_replacements;
2791
2792 /* List of replacements made below in purge_addressof_1 for patterns
2793 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2794    corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2795    the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2796    enough in complex cases, e.g. when some field values can be
2797    extracted by using a MEM with a narrower mode. */
2798 static rtx purge_addressof_replacements;
2799
2800 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2801 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2802 the stack. If the function returns FALSE then the replacement could not
2803 be made. */
2804
2805 static boolean
2806 purge_addressof_1 (loc, insn, force, store, ht)
2807 rtx *loc;
2808 rtx insn;
2809 int force, store;
2810 struct hash_table *ht;
2811 {
2812 rtx x;
2813 RTX_CODE code;
2814 int i, j;
2815 const char *fmt;
2816 boolean result = true;
2817
2818 /* Re-start here to avoid recursion in common cases. */
2819 restart:
2820
2821 x = *loc;
2822 if (x == 0)
2823 return true;
2824
2825 code = GET_CODE (x);
2826
2827 /* If we don't return in any of the cases below, we will recurse inside
2828 the RTX, which will normally result in any ADDRESSOF being forced into
2829 memory. */
2830 if (code == SET)
2831 {
2832 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2833 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2834 return result;
2835 }
2836
2837 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2838 {
2839 /* We must create a copy of the rtx because it was created by
2840 overwriting a REG rtx which is always shared. */
2841 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2842 rtx insns;
2843
2844 if (validate_change (insn, loc, sub, 0)
2845 || validate_replace_rtx (x, sub, insn))
2846 return true;
2847
2848 start_sequence ();
2849 sub = force_operand (sub, NULL_RTX);
2850 if (! validate_change (insn, loc, sub, 0)
2851 && ! validate_replace_rtx (x, sub, insn))
2852 abort ();
2853
2854 insns = gen_sequence ();
2855 end_sequence ();
2856 emit_insn_before (insns, insn);
2857 return true;
2858 }
2859
2860 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2861 {
2862 rtx sub = XEXP (XEXP (x, 0), 0);
2863 rtx sub2;
2864
2865 if (GET_CODE (sub) == MEM)
2866 {
2867 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2868 MEM_COPY_ATTRIBUTES (sub2, sub);
2869 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2870 sub = sub2;
2871 }
2872 else if (GET_CODE (sub) == REG
2873 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2874 ;
2875 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2876 {
2877 int size_x, size_sub;
2878
2879 if (!insn)
2880 {
2881 /* When processing REG_NOTES look at the list of
2882 replacements done on the insn to find the register that X
2883 was replaced by. */
2884 rtx tem;
2885
2886 for (tem = purge_bitfield_addressof_replacements;
2887 tem != NULL_RTX;
2888 tem = XEXP (XEXP (tem, 1), 1))
2889 if (rtx_equal_p (x, XEXP (tem, 0)))
2890 {
2891 *loc = XEXP (XEXP (tem, 1), 0);
2892 return true;
2893 }
2894
2895 /* See comment for purge_addressof_replacements. */
2896 for (tem = purge_addressof_replacements;
2897 tem != NULL_RTX;
2898 tem = XEXP (XEXP (tem, 1), 1))
2899 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2900 {
2901 rtx z = XEXP (XEXP (tem, 1), 0);
2902
2903 if (GET_MODE (x) == GET_MODE (z)
2904 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2905 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2906 abort ();
2907
2908 /* It can happen that the note may speak of things
2909 in a wider (or just different) mode than the
2910 code did. This is especially true of
2911 REG_RETVAL. */
2912
2913 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2914 z = SUBREG_REG (z);
2915
2916 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2917 && (GET_MODE_SIZE (GET_MODE (x))
2918 > GET_MODE_SIZE (GET_MODE (z))))
2919 {
2920                        /* This can occur as a result of invalid
2921 pointer casts, e.g. float f; ...
2922 *(long long int *)&f.
2923 ??? We could emit a warning here, but
2924 without a line number that wouldn't be
2925 very helpful. */
2926 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2927 }
2928 else
2929 z = gen_lowpart (GET_MODE (x), z);
2930
2931 *loc = z;
2932 return true;
2933 }
2934
2935 /* Sometimes we may not be able to find the replacement. For
2936 example when the original insn was a MEM in a wider mode,
2937 and the note is part of a sign extension of a narrowed
2938 version of that MEM. Gcc testcase compile/990829-1.c can
2939                 generate an example of this situation. Rather than complain
2940 we return false, which will prompt our caller to remove the
2941 offending note. */
2942 return false;
2943 }
2944
2945 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2946 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2947
2948 /* Don't even consider working with paradoxical subregs,
2949 or the moral equivalent seen here. */
2950 if (size_x <= size_sub
2951 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2952 {
2953 /* Do a bitfield insertion to mirror what would happen
2954 in memory. */
2955
2956 rtx val, seq;
2957
2958 if (store)
2959 {
2960 rtx p = PREV_INSN (insn);
2961
2962 start_sequence ();
2963 val = gen_reg_rtx (GET_MODE (x));
2964 if (! validate_change (insn, loc, val, 0))
2965 {
2966 /* Discard the current sequence and put the
2967 ADDRESSOF on stack. */
2968 end_sequence ();
2969 goto give_up;
2970 }
2971 seq = gen_sequence ();
2972 end_sequence ();
2973 emit_insn_before (seq, insn);
2974 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2975 insn, ht);
2976
2977 start_sequence ();
2978 store_bit_field (sub, size_x, 0, GET_MODE (x),
2979 val, GET_MODE_SIZE (GET_MODE (sub)),
2980 GET_MODE_SIZE (GET_MODE (sub)));
2981
2982 /* Make sure to unshare any shared rtl that store_bit_field
2983 might have created. */
2984 unshare_all_rtl_again (get_insns ());
2985
2986 seq = gen_sequence ();
2987 end_sequence ();
2988 p = emit_insn_after (seq, insn);
2989 if (NEXT_INSN (insn))
2990 compute_insns_for_mem (NEXT_INSN (insn),
2991 p ? NEXT_INSN (p) : NULL_RTX,
2992 ht);
2993 }
2994 else
2995 {
2996 rtx p = PREV_INSN (insn);
2997
2998 start_sequence ();
2999 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3000 GET_MODE (x), GET_MODE (x),
3001 GET_MODE_SIZE (GET_MODE (sub)),
3002 GET_MODE_SIZE (GET_MODE (sub)));
3003
3004 if (! validate_change (insn, loc, val, 0))
3005 {
3006 /* Discard the current sequence and put the
3007 ADDRESSOF on stack. */
3008 end_sequence ();
3009 goto give_up;
3010 }
3011
3012 seq = gen_sequence ();
3013 end_sequence ();
3014 emit_insn_before (seq, insn);
3015 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3016 insn, ht);
3017 }
3018
3019 /* Remember the replacement so that the same one can be done
3020 on the REG_NOTES. */
3021 purge_bitfield_addressof_replacements
3022 = gen_rtx_EXPR_LIST (VOIDmode, x,
3023 gen_rtx_EXPR_LIST
3024 (VOIDmode, val,
3025 purge_bitfield_addressof_replacements));
3026
3027 /* We replaced with a reg -- all done. */
3028 return true;
3029 }
3030 }
3031
3032 else if (validate_change (insn, loc, sub, 0))
3033 {
3034 /* Remember the replacement so that the same one can be done
3035 on the REG_NOTES. */
3036 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3037 {
3038 rtx tem;
3039
3040 for (tem = purge_addressof_replacements;
3041 tem != NULL_RTX;
3042 tem = XEXP (XEXP (tem, 1), 1))
3043 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3044 {
3045 XEXP (XEXP (tem, 1), 0) = sub;
3046 return true;
3047 }
3048 purge_addressof_replacements
3049 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3050 gen_rtx_EXPR_LIST (VOIDmode, sub,
3051 purge_addressof_replacements));
3052 return true;
3053 }
3054 goto restart;
3055 }
3056 give_up:;
3057 /* else give up and put it into the stack */
3058 }
3059
3060 else if (code == ADDRESSOF)
3061 {
3062 put_addressof_into_stack (x, ht);
3063 return true;
3064 }
3065 else if (code == SET)
3066 {
3067 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3068 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3069 return result;
3070 }
3071
3072 /* Scan all subexpressions. */
3073 fmt = GET_RTX_FORMAT (code);
3074 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3075 {
3076 if (*fmt == 'e')
3077 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3078 else if (*fmt == 'E')
3079 for (j = 0; j < XVECLEN (x, i); j++)
3080 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3081 }
3082
3083 return result;
3084 }
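
/* Illustrative sketch (hypothetical register numbers) of the bitfield
   path above for mode-punned references such as

       (set (mem:HI (addressof:Pmode (reg:SI 104) ...)) (reg:HI 110))

   The MEM destination is replaced by a fresh HImode pseudo, and
   store_bit_field then inserts that pseudo's 16 bits at bit position 0
   of (reg:SI 104) after the insn; the read case uses extract_bit_field
   instead.  */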
3085
3086 /* Return a new hash table entry in HT. */
3087
3088 static struct hash_entry *
3089 insns_for_mem_newfunc (he, ht, k)
3090 struct hash_entry *he;
3091 struct hash_table *ht;
3092 hash_table_key k ATTRIBUTE_UNUSED;
3093 {
3094 struct insns_for_mem_entry *ifmhe;
3095 if (he)
3096 return he;
3097
3098 ifmhe = ((struct insns_for_mem_entry *)
3099 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3100 ifmhe->insns = NULL_RTX;
3101
3102 return &ifmhe->he;
3103 }
3104
3105 /* Return a hash value for K, a REG. */
3106
3107 static unsigned long
3108 insns_for_mem_hash (k)
3109 hash_table_key k;
3110 {
3111   /* K is really an RTX. Just use the address as the hash value. */
3112 return (unsigned long) k;
3113 }
3114
3115 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3116
3117 static boolean
3118 insns_for_mem_comp (k1, k2)
3119 hash_table_key k1;
3120 hash_table_key k2;
3121 {
3122 return k1 == k2;
3123 }
3124
3125 struct insns_for_mem_walk_info {
3126 /* The hash table that we are using to record which INSNs use which
3127 MEMs. */
3128 struct hash_table *ht;
3129
3130   /* The INSN we are currently processing. */
3131 rtx insn;
3132
3133 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3134 to find the insns that use the REGs in the ADDRESSOFs. */
3135 int pass;
3136 };
3137
3138 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3139 that might be used in an ADDRESSOF expression, record this INSN in
3140 the hash table given by DATA (which is really a pointer to an
3141 insns_for_mem_walk_info structure). */
3142
3143 static int
3144 insns_for_mem_walk (r, data)
3145 rtx *r;
3146 void *data;
3147 {
3148 struct insns_for_mem_walk_info *ifmwi
3149 = (struct insns_for_mem_walk_info *) data;
3150
3151 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3152 && GET_CODE (XEXP (*r, 0)) == REG)
3153 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3154 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3155 {
3156       /* Look up this REG in the hashtable; we do not create an entry here. */
3157 struct insns_for_mem_entry *ifme
3158 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3159 *r,
3160 /*create=*/0,
3161 /*copy=*/0);
3162
3163 /* If we have not already recorded this INSN, do so now. Since
3164 we process the INSNs in order, we know that if we have
3165          recorded it, it must be at the front of the list. */
3166 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3167 {
3168 /* We do the allocation on the same obstack as is used for
3169 the hash table since this memory will not be used once
3170 the hash table is deallocated. */
3171 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3172 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3173 ifme->insns);
3174 pop_obstacks ();
3175 }
3176 }
3177
3178 return 0;
3179 }
3180
3181 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3182 which REGs in HT. */
3183
3184 static void
3185 compute_insns_for_mem (insns, last_insn, ht)
3186 rtx insns;
3187 rtx last_insn;
3188 struct hash_table *ht;
3189 {
3190 rtx insn;
3191 struct insns_for_mem_walk_info ifmwi;
3192 ifmwi.ht = ht;
3193
3194 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3195 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3196 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3197 {
3198 ifmwi.insn = insn;
3199 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3200 }
3201 }
3202
3203 /* Helper function for purge_addressof called through for_each_rtx.
3204 Returns true iff the rtl is an ADDRESSOF. */
3205 static int
3206 is_addressof (rtl, data)
3207 rtx * rtl;
3208 void * data ATTRIBUTE_UNUSED;
3209 {
3210 return GET_CODE (* rtl) == ADDRESSOF;
3211 }
3212
3213 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3214 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3215 stack. */
3216
3217 void
3218 purge_addressof (insns)
3219 rtx insns;
3220 {
3221 rtx insn;
3222 struct hash_table ht;
3223
3224 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3225 requires a fixup pass over the instruction stream to correct
3226 INSNs that depended on the REG being a REG, and not a MEM. But,
3227      these fixup passes are slow. Furthermore, most MEMs are not
3228 mentioned in very many instructions. So, we speed up the process
3229 by pre-calculating which REGs occur in which INSNs; that allows
3230 us to perform the fixup passes much more quickly. */
3231 hash_table_init (&ht,
3232 insns_for_mem_newfunc,
3233 insns_for_mem_hash,
3234 insns_for_mem_comp);
3235 compute_insns_for_mem (insns, NULL_RTX, &ht);
3236
3237 for (insn = insns; insn; insn = NEXT_INSN (insn))
3238 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3239 || GET_CODE (insn) == CALL_INSN)
3240 {
3241 if (! purge_addressof_1 (&PATTERN (insn), insn,
3242 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3243 /* If we could not replace the ADDRESSOFs in the insn,
3244 something is wrong. */
3245 abort ();
3246
3247 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3248 {
3249 /* If we could not replace the ADDRESSOFs in the insn's notes,
3250 we can just remove the offending notes instead. */
3251 rtx note;
3252
3253 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3254 {
3255 /* If we find a REG_RETVAL note then the insn is a libcall.
3256 Such insns must have REG_EQUAL notes as well, in order
3257 for later passes of the compiler to work. So it is not
3258 safe to delete the notes here, and instead we abort. */
3259 if (REG_NOTE_KIND (note) == REG_RETVAL)
3260 abort ();
3261 if (for_each_rtx (& note, is_addressof, NULL))
3262 remove_note (insn, note);
3263 }
3264 }
3265 }
3266
3267 /* Clean up. */
3268 hash_table_free (&ht);
3269 purge_bitfield_addressof_replacements = 0;
3270 purge_addressof_replacements = 0;
3271 }
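
/* Illustrative example (hypothetical register numbers): in the common
   case the ADDRESSOF simply disappears.  If (reg:SI 104) never needs to
   live in memory,

       (set (reg:SI 120) (mem:SI (addressof:Pmode (reg:SI 104) ...)))

   is rewritten by purge_addressof_1 as

       (set (reg:SI 120) (reg:SI 104))

   and only ADDRESSOFs that cannot be removed this way are forced onto
   the stack by put_addressof_into_stack.  */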
3272 \f
3273 /* Pass through the INSNS of function FNDECL and convert virtual register
3274 references to hard register references. */
3275
3276 void
3277 instantiate_virtual_regs (fndecl, insns)
3278 tree fndecl;
3279 rtx insns;
3280 {
3281 rtx insn;
3282 int i;
3283
3284 /* Compute the offsets to use for this function. */
3285 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3286 var_offset = STARTING_FRAME_OFFSET;
3287 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3288 out_arg_offset = STACK_POINTER_OFFSET;
3289 cfa_offset = ARG_POINTER_CFA_OFFSET;
3290
3291 /* Scan all variables and parameters of this function. For each that is
3292 in memory, instantiate all virtual registers if the result is a valid
3293 address. If not, we do it later. That will handle most uses of virtual
3294 regs on many machines. */
3295 instantiate_decls (fndecl, 1);
3296
3297 /* Initialize recognition, indicating that volatile is OK. */
3298 init_recog ();
3299
3300 /* Scan through all the insns, instantiating every virtual register still
3301 present. */
3302 for (insn = insns; insn; insn = NEXT_INSN (insn))
3303 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3304 || GET_CODE (insn) == CALL_INSN)
3305 {
3306 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3307 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3308 }
3309
3310 /* Instantiate the stack slots for the parm registers, for later use in
3311 addressof elimination. */
3312 for (i = 0; i < max_parm_reg; ++i)
3313 if (parm_reg_stack_loc[i])
3314 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3315
3316 /* Now instantiate the remaining register equivalences for debugging info.
3317 These will not be valid addresses. */
3318 instantiate_decls (fndecl, 0);
3319
3320 /* Indicate that, from now on, assign_stack_local should use
3321 frame_pointer_rtx. */
3322 virtuals_instantiated = 1;
3323 }
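
/* Illustrative example (assuming the target defines STARTING_FRAME_OFFSET
   as -16): a local-variable reference

       (mem:SI (plus (reg virtual_stack_vars) (const_int 8)))

   is instantiated as

       (mem:SI (plus (reg frame_pointer) (const_int -8)))

   since var_offset == -16 and 8 + (-16) == -8.  The other virtual
   registers are handled the same way with their respective offsets.  */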
3324
3325 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3326 all virtual registers in their DECL_RTL's.
3327
3328 If VALID_ONLY, do this only if the resulting address is still valid.
3329 Otherwise, always do it. */
3330
3331 static void
3332 instantiate_decls (fndecl, valid_only)
3333 tree fndecl;
3334 int valid_only;
3335 {
3336 tree decl;
3337
3338 if (DECL_SAVED_INSNS (fndecl))
3339 /* When compiling an inline function, the obstack used for
3340 rtl allocation is the maybepermanent_obstack. Calling
3341 `resume_temporary_allocation' switches us back to that
3342 obstack while we process this function's parameters. */
3343 resume_temporary_allocation ();
3344
3345 /* Process all parameters of the function. */
3346 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3347 {
3348 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3349
3350 instantiate_decl (DECL_RTL (decl), size, valid_only);
3351
3352 /* If the parameter was promoted, then the incoming RTL mode may be
3353 larger than the declared type size. We must use the larger of
3354 the two sizes. */
3355 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3356 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3357 }
3358
3359 /* Now process all variables defined in the function or its subblocks. */
3360 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3361
3362 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3363 {
3364 /* Save all rtl allocated for this function by raising the
3365 high-water mark on the maybepermanent_obstack. */
3366 preserve_data ();
3367 /* All further rtl allocation is now done in the current_obstack. */
3368 rtl_in_current_obstack ();
3369 }
3370 }
3371
3372 /* Subroutine of instantiate_decls: Process all decls in the given
3373 BLOCK node and all its subblocks. */
3374
3375 static void
3376 instantiate_decls_1 (let, valid_only)
3377 tree let;
3378 int valid_only;
3379 {
3380 tree t;
3381
3382 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3383 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3384 valid_only);
3385
3386 /* Process all subblocks. */
3387 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3388 instantiate_decls_1 (t, valid_only);
3389 }
3390
3391 /* Subroutine of the preceding procedures: Given RTL representing a
3392 decl and the size of the object, do any instantiation required.
3393
3394 If VALID_ONLY is non-zero, it means that the RTL should only be
3395 changed if the new address is valid. */
3396
3397 static void
3398 instantiate_decl (x, size, valid_only)
3399 rtx x;
3400 int size;
3401 int valid_only;
3402 {
3403 enum machine_mode mode;
3404 rtx addr;
3405
3406 /* If this is not a MEM, no need to do anything. Similarly if the
3407 address is a constant or a register that is not a virtual register. */
3408
3409 if (x == 0 || GET_CODE (x) != MEM)
3410 return;
3411
3412 addr = XEXP (x, 0);
3413 if (CONSTANT_P (addr)
3414 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3415 || (GET_CODE (addr) == REG
3416 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3417 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3418 return;
3419
3420 /* If we should only do this if the address is valid, copy the address.
3421 We need to do this so we can undo any changes that might make the
3422 address invalid. This copy is unfortunate, but probably can't be
3423 avoided. */
3424
3425 if (valid_only)
3426 addr = copy_rtx (addr);
3427
3428 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3429
3430 if (valid_only)
3431 {
3432 /* Now verify that the resulting address is valid for every integer or
3433 floating-point mode up to and including SIZE bytes long. We do this
3434 since the object might be accessed in any mode and frame addresses
3435 are shared. */
3436
3437 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3438 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3439 mode = GET_MODE_WIDER_MODE (mode))
3440 if (! memory_address_p (mode, addr))
3441 return;
3442
3443 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3444 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3445 mode = GET_MODE_WIDER_MODE (mode))
3446 if (! memory_address_p (mode, addr))
3447 return;
3448 }
3449
3450 /* Put back the address now that we have updated it and we either know
3451 it is valid or we don't care whether it is valid. */
3452
3453 XEXP (x, 0) = addr;
3454 }
3455 \f
3456 /* Given a pointer to a piece of rtx and an optional pointer to the
3457 containing object, instantiate any virtual registers present in it.
3458
3459 If EXTRA_INSNS, we always do the replacement and generate
3460    any extra insns before OBJECT. If it is zero, we do nothing if replacement
3461 is not valid.
3462
3463 Return 1 if we either had nothing to do or if we were able to do the
3464 needed replacement. Return 0 otherwise; we only return zero if
3465 EXTRA_INSNS is zero.
3466
3467 We first try some simple transformations to avoid the creation of extra
3468 pseudos. */
3469
3470 static int
3471 instantiate_virtual_regs_1 (loc, object, extra_insns)
3472 rtx *loc;
3473 rtx object;
3474 int extra_insns;
3475 {
3476 rtx x;
3477 RTX_CODE code;
3478 rtx new = 0;
3479 HOST_WIDE_INT offset = 0;
3480 rtx temp;
3481 rtx seq;
3482 int i, j;
3483 const char *fmt;
3484
3485 /* Re-start here to avoid recursion in common cases. */
3486 restart:
3487
3488 x = *loc;
3489 if (x == 0)
3490 return 1;
3491
3492 code = GET_CODE (x);
3493
3494 /* Check for some special cases. */
3495 switch (code)
3496 {
3497 case CONST_INT:
3498 case CONST_DOUBLE:
3499 case CONST:
3500 case SYMBOL_REF:
3501 case CODE_LABEL:
3502 case PC:
3503 case CC0:
3504 case ASM_INPUT:
3505 case ADDR_VEC:
3506 case ADDR_DIFF_VEC:
3507 case RETURN:
3508 return 1;
3509
3510 case SET:
3511 /* We are allowed to set the virtual registers. This means that
3512 the actual register should receive the source minus the
3513 appropriate offset. This is used, for example, in the handling
3514 of non-local gotos. */
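      /* For example, (set (reg virtual-stack-vars) (reg R)) is rewritten so
         that the frame pointer is set to R minus var_offset, with any insns
         needed to compute that value emitted before OBJECT.  */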
3515 if (SET_DEST (x) == virtual_incoming_args_rtx)
3516 new = arg_pointer_rtx, offset = - in_arg_offset;
3517 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3518 new = frame_pointer_rtx, offset = - var_offset;
3519 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3520 new = stack_pointer_rtx, offset = - dynamic_offset;
3521 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3522 new = stack_pointer_rtx, offset = - out_arg_offset;
3523 else if (SET_DEST (x) == virtual_cfa_rtx)
3524 new = arg_pointer_rtx, offset = - cfa_offset;
3525
3526 if (new)
3527 {
3528 rtx src = SET_SRC (x);
3529
3530 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3531
3532 /* The only valid sources here are PLUS or REG. Just do
3533 the simplest possible thing to handle them. */
3534 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3535 abort ();
3536
3537 start_sequence ();
3538 if (GET_CODE (src) != REG)
3539 temp = force_operand (src, NULL_RTX);
3540 else
3541 temp = src;
3542 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3543 seq = get_insns ();
3544 end_sequence ();
3545
3546 emit_insns_before (seq, object);
3547 SET_DEST (x) = new;
3548
3549 if (! validate_change (object, &SET_SRC (x), temp, 0)
3550 || ! extra_insns)
3551 abort ();
3552
3553 return 1;
3554 }
3555
3556 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3557 loc = &SET_SRC (x);
3558 goto restart;
3559
3560 case PLUS:
3561 /* Handle special case of virtual register plus constant. */
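      /* For instance, (plus (reg virtual-stack-vars) (const_int C)) becomes
         (plus (reg frame-pointer) (const_int C + var_offset)), or just the
         register when the combined constant turns out to be zero.  */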
3562 if (CONSTANT_P (XEXP (x, 1)))
3563 {
3564 rtx old, new_offset;
3565
3566 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3567 if (GET_CODE (XEXP (x, 0)) == PLUS)
3568 {
3569 rtx inner = XEXP (XEXP (x, 0), 0);
3570
3571 if (inner == virtual_incoming_args_rtx)
3572 new = arg_pointer_rtx, offset = in_arg_offset;
3573 else if (inner == virtual_stack_vars_rtx)
3574 new = frame_pointer_rtx, offset = var_offset;
3575 else if (inner == virtual_stack_dynamic_rtx)
3576 new = stack_pointer_rtx, offset = dynamic_offset;
3577 else if (inner == virtual_outgoing_args_rtx)
3578 new = stack_pointer_rtx, offset = out_arg_offset;
3579 else if (inner == virtual_cfa_rtx)
3580 new = arg_pointer_rtx, offset = cfa_offset;
3581 else
3582 {
3583 loc = &XEXP (x, 0);
3584 goto restart;
3585 }
3586
3587 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3588 extra_insns);
3589 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3590 }
3591
3592 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3593 new = arg_pointer_rtx, offset = in_arg_offset;
3594 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3595 new = frame_pointer_rtx, offset = var_offset;
3596 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3597 new = stack_pointer_rtx, offset = dynamic_offset;
3598 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3599 new = stack_pointer_rtx, offset = out_arg_offset;
3600 else if (XEXP (x, 0) == virtual_cfa_rtx)
3601 new = arg_pointer_rtx, offset = cfa_offset;
3602 else
3603 {
3604 /* We know the second operand is a constant. Unless the
3605            first operand is a REG (which has already been checked),
3606 it needs to be checked. */
3607 if (GET_CODE (XEXP (x, 0)) != REG)
3608 {
3609 loc = &XEXP (x, 0);
3610 goto restart;
3611 }
3612 return 1;
3613 }
3614
3615 new_offset = plus_constant (XEXP (x, 1), offset);
3616
3617 /* If the new constant is zero, try to replace the sum with just
3618 the register. */
3619 if (new_offset == const0_rtx
3620 && validate_change (object, loc, new, 0))
3621 return 1;
3622
3623 /* Next try to replace the register and new offset.
3624 There are two changes to validate here and we can't assume that
3625 in the case of old offset equals new just changing the register
3626 will yield a valid insn. In the interests of a little efficiency,
3627 however, we only call validate change once (we don't queue up the
3628 changes and then call apply_change_group). */
3629
3630 old = XEXP (x, 0);
3631 if (offset == 0
3632 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3633 : (XEXP (x, 0) = new,
3634 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3635 {
3636 if (! extra_insns)
3637 {
3638 XEXP (x, 0) = old;
3639 return 0;
3640 }
3641
3642 /* Otherwise copy the new constant into a register and replace
3643 constant with that register. */
3644 temp = gen_reg_rtx (Pmode);
3645 XEXP (x, 0) = new;
3646 if (validate_change (object, &XEXP (x, 1), temp, 0))
3647 emit_insn_before (gen_move_insn (temp, new_offset), object);
3648 else
3649 {
3650 /* If that didn't work, replace this expression with a
3651 register containing the sum. */
3652
3653 XEXP (x, 0) = old;
3654 new = gen_rtx_PLUS (Pmode, new, new_offset);
3655
3656 start_sequence ();
3657 temp = force_operand (new, NULL_RTX);
3658 seq = get_insns ();
3659 end_sequence ();
3660
3661 emit_insns_before (seq, object);
3662 if (! validate_change (object, loc, temp, 0)
3663 && ! validate_replace_rtx (x, temp, object))
3664 abort ();
3665 }
3666 }
3667
3668 return 1;
3669 }
3670
3671 /* Fall through to generic two-operand expression case. */
3672 case EXPR_LIST:
3673 case CALL:
3674 case COMPARE:
3675 case MINUS:
3676 case MULT:
3677 case DIV: case UDIV:
3678 case MOD: case UMOD:
3679 case AND: case IOR: case XOR:
3680 case ROTATERT: case ROTATE:
3681 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3682 case NE: case EQ:
3683 case GE: case GT: case GEU: case GTU:
3684 case LE: case LT: case LEU: case LTU:
3685 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3686 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3687 loc = &XEXP (x, 0);
3688 goto restart;
3689
3690 case MEM:
3691 /* Most cases of MEM that convert to valid addresses have already been
3692 handled by our scan of decls. The only special handling we
3693 need here is to make a copy of the rtx to ensure it isn't being
3694 shared if we have to change it to a pseudo.
3695
3696 If the rtx is a simple reference to an address via a virtual register,
3697 it can potentially be shared. In such cases, first try to make it
3698 a valid address, which can also be shared. Otherwise, copy it and
3699 proceed normally.
3700
3701 First check for common cases that need no processing. These are
3702 usually due to instantiation already being done on a previous instance
3703 of a shared rtx. */
3704
3705 temp = XEXP (x, 0);
3706 if (CONSTANT_ADDRESS_P (temp)
3707 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3708 || temp == arg_pointer_rtx
3709 #endif
3710 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3711 || temp == hard_frame_pointer_rtx
3712 #endif
3713 || temp == frame_pointer_rtx)
3714 return 1;
3715
3716 if (GET_CODE (temp) == PLUS
3717 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3718 && (XEXP (temp, 0) == frame_pointer_rtx
3719 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3720 || XEXP (temp, 0) == hard_frame_pointer_rtx
3721 #endif
3722 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3723 || XEXP (temp, 0) == arg_pointer_rtx
3724 #endif
3725 ))
3726 return 1;
3727
3728 if (temp == virtual_stack_vars_rtx
3729 || temp == virtual_incoming_args_rtx
3730 || (GET_CODE (temp) == PLUS
3731 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3732 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3733 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3734 {
3735 /* This MEM may be shared. If the substitution can be done without
3736 the need to generate new pseudos, we want to do it in place
3737 so all copies of the shared rtx benefit. The call below will
3738 only make substitutions if the resulting address is still
3739 valid.
3740
3741 Note that we cannot pass X as the object in the recursive call
3742 since the insn being processed may not allow all valid
3743         addresses.  However, if we were not passed an object, we can
3744 only modify X without copying it if X will have a valid
3745 address.
3746
3747 ??? Also note that this can still lose if OBJECT is an insn that
3748         has fewer restrictions on an address than some other insn.
3749 In that case, we will modify the shared address. This case
3750 doesn't seem very likely, though. One case where this could
3751 happen is in the case of a USE or CLOBBER reference, but we
3752 take care of that below. */
3753
3754 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3755 object ? object : x, 0))
3756 return 1;
3757
3758 /* Otherwise make a copy and process that copy. We copy the entire
3759 RTL expression since it might be a PLUS which could also be
3760 shared. */
3761 *loc = x = copy_rtx (x);
3762 }
3763
3764 /* Fall through to generic unary operation case. */
3765 case SUBREG:
3766 case STRICT_LOW_PART:
3767 case NEG: case NOT:
3768 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3769 case SIGN_EXTEND: case ZERO_EXTEND:
3770 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3771 case FLOAT: case FIX:
3772 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3773 case ABS:
3774 case SQRT:
3775 case FFS:
3776       /* These cases either have just one operand or we know that we need not
3777 check the rest of the operands. */
3778 loc = &XEXP (x, 0);
3779 goto restart;
3780
3781 case USE:
3782 case CLOBBER:
3783       /* If the operand is a MEM, see if the change yields a valid MEM.  If not,
3784          go ahead and make the invalid change, but do it to a copy.  For a REG,
3785          just make the recursive call, since there's no chance of a problem.
3786
3787 if ((GET_CODE (XEXP (x, 0)) == MEM
3788 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3789 0))
3790 || (GET_CODE (XEXP (x, 0)) == REG
3791 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3792 return 1;
3793
3794 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3795 loc = &XEXP (x, 0);
3796 goto restart;
3797
3798 case REG:
3799 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3800 in front of this insn and substitute the temporary. */
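      /* E.g. a bare (reg virtual-incoming-args) becomes
         (plus (reg arg-pointer) (const_int in_arg_offset)), or a new pseudo
         holding that sum if the PLUS is not valid in this insn.  */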
3801 if (x == virtual_incoming_args_rtx)
3802 new = arg_pointer_rtx, offset = in_arg_offset;
3803 else if (x == virtual_stack_vars_rtx)
3804 new = frame_pointer_rtx, offset = var_offset;
3805 else if (x == virtual_stack_dynamic_rtx)
3806 new = stack_pointer_rtx, offset = dynamic_offset;
3807 else if (x == virtual_outgoing_args_rtx)
3808 new = stack_pointer_rtx, offset = out_arg_offset;
3809 else if (x == virtual_cfa_rtx)
3810 new = arg_pointer_rtx, offset = cfa_offset;
3811
3812 if (new)
3813 {
3814 temp = plus_constant (new, offset);
3815 if (!validate_change (object, loc, temp, 0))
3816 {
3817 if (! extra_insns)
3818 return 0;
3819
3820 start_sequence ();
3821 temp = force_operand (temp, NULL_RTX);
3822 seq = get_insns ();
3823 end_sequence ();
3824
3825 emit_insns_before (seq, object);
3826 if (! validate_change (object, loc, temp, 0)
3827 && ! validate_replace_rtx (x, temp, object))
3828 abort ();
3829 }
3830 }
3831
3832 return 1;
3833
3834 case ADDRESSOF:
3835 if (GET_CODE (XEXP (x, 0)) == REG)
3836 return 1;
3837
3838 else if (GET_CODE (XEXP (x, 0)) == MEM)
3839 {
3840 /* If we have a (addressof (mem ..)), do any instantiation inside
3841 since we know we'll be making the inside valid when we finally
3842 remove the ADDRESSOF. */
3843 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3844 return 1;
3845 }
3846 break;
3847
3848 default:
3849 break;
3850 }
3851
3852 /* Scan all subexpressions. */
3853 fmt = GET_RTX_FORMAT (code);
3854 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3855 if (*fmt == 'e')
3856 {
3857 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3858 return 0;
3859 }
3860 else if (*fmt == 'E')
3861 for (j = 0; j < XVECLEN (x, i); j++)
3862 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3863 extra_insns))
3864 return 0;
3865
3866 return 1;
3867 }
3868 \f
3869 /* Optimization: assuming this function does not receive nonlocal gotos,
3870 delete the handlers for such, as well as the insns to establish
3871 and disestablish them. */
3872
3873 static void
3874 delete_handlers ()
3875 {
3876 rtx insn;
3877 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3878 {
3879 /* Delete the handler by turning off the flag that would
3880 prevent jump_optimize from deleting it.
3881 Also permit deletion of the nonlocal labels themselves
3882 if nothing local refers to them. */
3883 if (GET_CODE (insn) == CODE_LABEL)
3884 {
3885 tree t, last_t;
3886
3887 LABEL_PRESERVE_P (insn) = 0;
3888
3889 /* Remove it from the nonlocal_label list, to avoid confusing
3890 flow. */
3891 for (t = nonlocal_labels, last_t = 0; t;
3892 last_t = t, t = TREE_CHAIN (t))
3893 if (DECL_RTL (TREE_VALUE (t)) == insn)
3894 break;
3895 if (t)
3896 {
3897 if (! last_t)
3898 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3899 else
3900 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3901 }
3902 }
3903 if (GET_CODE (insn) == INSN)
3904 {
3905 int can_delete = 0;
3906 rtx t;
3907 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3908 if (reg_mentioned_p (t, PATTERN (insn)))
3909 {
3910 can_delete = 1;
3911 break;
3912 }
3913 if (can_delete
3914 || (nonlocal_goto_stack_level != 0
3915 && reg_mentioned_p (nonlocal_goto_stack_level,
3916 PATTERN (insn))))
3917 delete_insn (insn);
3918 }
3919 }
3920 }
3921 \f
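/* Return the current value of max_parm_reg, which assign_parms keeps one
   greater than the largest register number used for a parameter.  */
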
3922 int
3923 max_parm_reg_num ()
3924 {
3925 return max_parm_reg;
3926 }
3927
3928 /* Return the first insn following those generated by `assign_parms'. */
3929
3930 rtx
3931 get_first_nonparm_insn ()
3932 {
3933 if (last_parm_insn)
3934 return NEXT_INSN (last_parm_insn);
3935 return get_insns ();
3936 }
3937
3938 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3939 Crash if there is none. */
3940
3941 rtx
3942 get_first_block_beg ()
3943 {
3944 register rtx searcher;
3945 register rtx insn = get_first_nonparm_insn ();
3946
3947 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3948 if (GET_CODE (searcher) == NOTE
3949 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3950 return searcher;
3951
3952 abort (); /* Invalid call to this function. (See comments above.) */
3953 return NULL_RTX;
3954 }
3955
3956 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3957 This means a type for which function calls must pass an address to the
3958 function or get an address back from the function.
3959 EXP may be a type node or an expression (whose type is tested). */
3960
3961 int
3962 aggregate_value_p (exp)
3963 tree exp;
3964 {
3965 int i, regno, nregs;
3966 rtx reg;
3967 tree type;
3968 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3969 type = exp;
3970 else
3971 type = TREE_TYPE (exp);
3972
3973 if (RETURN_IN_MEMORY (type))
3974 return 1;
3975 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3976 and thus can't be returned in registers. */
3977 if (TREE_ADDRESSABLE (type))
3978 return 1;
3979 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3980 return 1;
3981 /* Make sure we have suitable call-clobbered regs to return
3982 the value in; if not, we must return it in memory. */
3983 reg = hard_function_value (type, 0, 0);
3984
3985 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3986 it is OK. */
3987 if (GET_CODE (reg) != REG)
3988 return 0;
3989
3990 regno = REGNO (reg);
3991 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3992 for (i = 0; i < nregs; i++)
3993 if (! call_used_regs[regno + i])
3994 return 1;
3995 return 0;
3996 }
3997 \f
3998 /* Assign RTL expressions to the function's parameters.
3999 This may involve copying them into registers and using
4000 those registers as the RTL for them. */
4001
4002 void
4003 assign_parms (fndecl)
4004 tree fndecl;
4005 {
4006 register tree parm;
4007 register rtx entry_parm = 0;
4008 register rtx stack_parm = 0;
4009 CUMULATIVE_ARGS args_so_far;
4010 enum machine_mode promoted_mode, passed_mode;
4011 enum machine_mode nominal_mode, promoted_nominal_mode;
4012 int unsignedp;
4013 /* Total space needed so far for args on the stack,
4014 given as a constant and a tree-expression. */
4015 struct args_size stack_args_size;
4016 tree fntype = TREE_TYPE (fndecl);
4017 tree fnargs = DECL_ARGUMENTS (fndecl);
4018 /* This is used for the arg pointer when referring to stack args. */
4019 rtx internal_arg_pointer;
4020   /* This is a dummy PARM_DECL that we use for the function result if
4021 the function returns a structure. */
4022 tree function_result_decl = 0;
4023 #ifdef SETUP_INCOMING_VARARGS
4024 int varargs_setup = 0;
4025 #endif
4026 rtx conversion_insns = 0;
4027 struct args_size alignment_pad;
4028
4029 /* Nonzero if the last arg is named `__builtin_va_alist',
4030 which is used on some machines for old-fashioned non-ANSI varargs.h;
4031 this should be stuck onto the stack as if it had arrived there. */
4032 int hide_last_arg
4033 = (current_function_varargs
4034 && fnargs
4035 && (parm = tree_last (fnargs)) != 0
4036 && DECL_NAME (parm)
4037 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4038 "__builtin_va_alist")));
4039
4040 /* Nonzero if function takes extra anonymous args.
4041 This means the last named arg must be on the stack
4042 right before the anonymous ones. */
4043 int stdarg
4044 = (TYPE_ARG_TYPES (fntype) != 0
4045 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4046 != void_type_node));
4047
4048 current_function_stdarg = stdarg;
4049
4050 /* If the reg that the virtual arg pointer will be translated into is
4051 not a fixed reg or is the stack pointer, make a copy of the virtual
4052 arg pointer, and address parms via the copy. The frame pointer is
4053 considered fixed even though it is not marked as such.
4054
4055 The second time through, simply use ap to avoid generating rtx. */
4056
4057 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4058 || ! (fixed_regs[ARG_POINTER_REGNUM]
4059 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4060 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4061 else
4062 internal_arg_pointer = virtual_incoming_args_rtx;
4063 current_function_internal_arg_pointer = internal_arg_pointer;
4064
4065 stack_args_size.constant = 0;
4066 stack_args_size.var = 0;
4067
4068 /* If struct value address is treated as the first argument, make it so. */
4069 if (aggregate_value_p (DECL_RESULT (fndecl))
4070 && ! current_function_returns_pcc_struct
4071 && struct_value_incoming_rtx == 0)
4072 {
4073 tree type = build_pointer_type (TREE_TYPE (fntype));
4074
4075 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4076
4077 DECL_ARG_TYPE (function_result_decl) = type;
4078 TREE_CHAIN (function_result_decl) = fnargs;
4079 fnargs = function_result_decl;
4080 }
4081
4082 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4083 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4084
4085 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4086 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4087 #else
4088 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4089 #endif
4090
4091 /* We haven't yet found an argument that we must push and pretend the
4092 caller did. */
4093 current_function_pretend_args_size = 0;
4094
4095 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4096 {
4097 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4098 struct args_size stack_offset;
4099 struct args_size arg_size;
4100 int passed_pointer = 0;
4101 int did_conversion = 0;
4102 tree passed_type = DECL_ARG_TYPE (parm);
4103 tree nominal_type = TREE_TYPE (parm);
4104 int pretend_named;
4105
4106 /* Set LAST_NAMED if this is last named arg before some
4107 anonymous args. */
4108 int last_named = ((TREE_CHAIN (parm) == 0
4109 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4110 && (stdarg || current_function_varargs));
4111 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4112 most machines, if this is a varargs/stdarg function, then we treat
4113 the last named arg as if it were anonymous too. */
4114 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4115
4116 if (TREE_TYPE (parm) == error_mark_node
4117 /* This can happen after weird syntax errors
4118 or if an enum type is defined among the parms. */
4119 || TREE_CODE (parm) != PARM_DECL
4120 || passed_type == NULL)
4121 {
4122 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4123 = gen_rtx_MEM (BLKmode, const0_rtx);
4124 TREE_USED (parm) = 1;
4125 continue;
4126 }
4127
4128       /* For a varargs.h function, save info about regs and stack space
4129 used by the individual args, not including the va_alist arg. */
4130 if (hide_last_arg && last_named)
4131 current_function_args_info = args_so_far;
4132
4133 /* Find mode of arg as it is passed, and mode of arg
4134 as it should be during execution of this function. */
4135 passed_mode = TYPE_MODE (passed_type);
4136 nominal_mode = TYPE_MODE (nominal_type);
4137
4138 /* If the parm's mode is VOID, its value doesn't matter,
4139          so avoid the usual things like emit_move_insn that could crash.  */
4140 if (nominal_mode == VOIDmode)
4141 {
4142 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4143 continue;
4144 }
4145
4146 /* If the parm is to be passed as a transparent union, use the
4147 type of the first field for the tests below. We have already
4148 verified that the modes are the same. */
4149 if (DECL_TRANSPARENT_UNION (parm)
4150 || TYPE_TRANSPARENT_UNION (passed_type))
4151 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4152
4153 /* See if this arg was passed by invisible reference. It is if
4154 it is an object whose size depends on the contents of the
4155 object itself or if the machine requires these objects be passed
4156 that way. */
4157
4158 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4159 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4160 || TREE_ADDRESSABLE (passed_type)
4161 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4162 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4163 passed_type, named_arg)
4164 #endif
4165 )
4166 {
4167 passed_type = nominal_type = build_pointer_type (passed_type);
4168 passed_pointer = 1;
4169 passed_mode = nominal_mode = Pmode;
4170 }
4171
4172 promoted_mode = passed_mode;
4173
4174 #ifdef PROMOTE_FUNCTION_ARGS
4175       /* Compute the mode to which the arg is actually extended.  */
4176 unsignedp = TREE_UNSIGNED (passed_type);
4177 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4178 #endif
4179
4180 /* Let machine desc say which reg (if any) the parm arrives in.
4181 0 means it arrives on the stack. */
4182 #ifdef FUNCTION_INCOMING_ARG
4183 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4184 passed_type, named_arg);
4185 #else
4186 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4187 passed_type, named_arg);
4188 #endif
4189
4190 if (entry_parm == 0)
4191 promoted_mode = passed_mode;
4192
4193 #ifdef SETUP_INCOMING_VARARGS
4194 /* If this is the last named parameter, do any required setup for
4195 varargs or stdargs. We need to know about the case of this being an
4196 addressable type, in which case we skip the registers it
4197 would have arrived in.
4198
4199 For stdargs, LAST_NAMED will be set for two parameters, the one that
4200 is actually the last named, and the dummy parameter. We only
4201 want to do this action once.
4202
4203 Also, indicate when RTL generation is to be suppressed. */
4204 if (last_named && !varargs_setup)
4205 {
4206 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4207 current_function_pretend_args_size, 0);
4208 varargs_setup = 1;
4209 }
4210 #endif
4211
4212 /* Determine parm's home in the stack,
4213 in case it arrives in the stack or we should pretend it did.
4214
4215 Compute the stack position and rtx where the argument arrives
4216 and its size.
4217
4218 There is one complexity here: If this was a parameter that would
4219          have been passed in registers, but wasn't, only because it is
4220 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4221 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4222 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4223 0 as it was the previous time. */
4224
4225 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4226 locate_and_pad_parm (promoted_mode, passed_type,
4227 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4228 1,
4229 #else
4230 #ifdef FUNCTION_INCOMING_ARG
4231 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4232 passed_type,
4233 pretend_named) != 0,
4234 #else
4235 FUNCTION_ARG (args_so_far, promoted_mode,
4236 passed_type,
4237 pretend_named) != 0,
4238 #endif
4239 #endif
4240 fndecl, &stack_args_size, &stack_offset, &arg_size,
4241 &alignment_pad);
4242
4243 {
4244 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4245
4246 if (offset_rtx == const0_rtx)
4247 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4248 else
4249 stack_parm = gen_rtx_MEM (promoted_mode,
4250 gen_rtx_PLUS (Pmode,
4251 internal_arg_pointer,
4252 offset_rtx));
4253
4254 /* If this is a memory ref that contains aggregate components,
4255 mark it as such for cse and loop optimize. Likewise if it
4256 is readonly. */
4257 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4258 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4259 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4260 }
4261
4262 /* If this parameter was passed both in registers and in the stack,
4263 use the copy on the stack. */
4264 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4265 entry_parm = 0;
4266
4267 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4268 /* If this parm was passed part in regs and part in memory,
4269 pretend it arrived entirely in memory
4270 by pushing the register-part onto the stack.
4271
4272 In the special case of a DImode or DFmode that is split,
4273 we could put it together in a pseudoreg directly,
4274 but for now that's not worth bothering with. */
4275
4276 if (entry_parm)
4277 {
4278 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4279 passed_type, named_arg);
4280
4281 if (nregs > 0)
4282 {
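              /* Round NREGS * UNITS_PER_WORD up to a multiple of
                 PARM_BOUNDARY, expressed in bytes.  */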
4283 current_function_pretend_args_size
4284 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4285 / (PARM_BOUNDARY / BITS_PER_UNIT)
4286 * (PARM_BOUNDARY / BITS_PER_UNIT));
4287
4288 /* Handle calls that pass values in multiple non-contiguous
4289 locations. The Irix 6 ABI has examples of this. */
4290 if (GET_CODE (entry_parm) == PARALLEL)
4291 emit_group_store (validize_mem (stack_parm), entry_parm,
4292 int_size_in_bytes (TREE_TYPE (parm)),
4293 (TYPE_ALIGN (TREE_TYPE (parm))
4294 / BITS_PER_UNIT));
4295 else
4296 move_block_from_reg (REGNO (entry_parm),
4297 validize_mem (stack_parm), nregs,
4298 int_size_in_bytes (TREE_TYPE (parm)));
4299
4300 entry_parm = stack_parm;
4301 }
4302 }
4303 #endif
4304
4305 /* If we didn't decide this parm came in a register,
4306 by default it came on the stack. */
4307 if (entry_parm == 0)
4308 entry_parm = stack_parm;
4309
4310 /* Record permanently how this parm was passed. */
4311 DECL_INCOMING_RTL (parm) = entry_parm;
4312
4313 /* If there is actually space on the stack for this parm,
4314 count it in stack_args_size; otherwise set stack_parm to 0
4315 to indicate there is no preallocated stack slot for the parm. */
4316
4317 if (entry_parm == stack_parm
4318 || (GET_CODE (entry_parm) == PARALLEL
4319 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4320 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4321 /* On some machines, even if a parm value arrives in a register
4322 there is still an (uninitialized) stack slot allocated for it.
4323
4324 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4325 whether this parameter already has a stack slot allocated,
4326 because an arg block exists only if current_function_args_size
4327 is larger than some threshold, and we haven't calculated that
4328 yet. So, for now, we just assume that stack slots never exist
4329 in this case. */
4330 || REG_PARM_STACK_SPACE (fndecl) > 0
4331 #endif
4332 )
4333 {
4334 stack_args_size.constant += arg_size.constant;
4335 if (arg_size.var)
4336 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4337 }
4338 else
4339 /* No stack slot was pushed for this parm. */
4340 stack_parm = 0;
4341
4342 /* Update info on where next arg arrives in registers. */
4343
4344 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4345 passed_type, named_arg);
4346
4347 /* If we can't trust the parm stack slot to be aligned enough
4348 for its ultimate type, don't use that slot after entry.
4349 We'll make another stack slot, if we need one. */
4350 {
4351 unsigned int thisparm_boundary
4352 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4353
4354 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4355 stack_parm = 0;
4356 }
4357
4358 /* If parm was passed in memory, and we need to convert it on entry,
4359 don't store it back in that same slot. */
4360 if (entry_parm != 0
4361 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4362 stack_parm = 0;
4363
4364 #if 0
4365 /* Now adjust STACK_PARM to the mode and precise location
4366 where this parameter should live during execution,
4367 if we discover that it must live in the stack during execution.
4368 To make debuggers happier on big-endian machines, we store
4369 the value in the last bytes of the space available. */
4370
4371 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4372 && stack_parm != 0)
4373 {
4374 rtx offset_rtx;
4375
4376 if (BYTES_BIG_ENDIAN
4377 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4378 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4379 - GET_MODE_SIZE (nominal_mode));
4380
4381 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4382 if (offset_rtx == const0_rtx)
4383 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4384 else
4385 stack_parm = gen_rtx_MEM (nominal_mode,
4386 gen_rtx_PLUS (Pmode,
4387 internal_arg_pointer,
4388 offset_rtx));
4389
4390 /* If this is a memory ref that contains aggregate components,
4391 mark it as such for cse and loop optimize. */
4392 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4393 }
4394 #endif /* 0 */
4395
4396 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4397 in the mode in which it arrives.
4398 STACK_PARM is an RTX for a stack slot where the parameter can live
4399 during the function (in case we want to put it there).
4400 STACK_PARM is 0 if no stack slot was pushed for it.
4401
4402 Now output code if necessary to convert ENTRY_PARM to
4403 the type in which this function declares it,
4404 and store that result in an appropriate place,
4405 which may be a pseudo reg, may be STACK_PARM,
4406 or may be a local stack slot if STACK_PARM is 0.
4407
4408 Set DECL_RTL to that place. */
4409
4410 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4411 {
4412 /* If a BLKmode arrives in registers, copy it to a stack slot.
4413 Handle calls that pass values in multiple non-contiguous
4414 locations. The Irix 6 ABI has examples of this. */
4415 if (GET_CODE (entry_parm) == REG
4416 || GET_CODE (entry_parm) == PARALLEL)
4417 {
4418 int size_stored
4419 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4420 UNITS_PER_WORD);
4421
4422 /* Note that we will be storing an integral number of words.
4423 So we have to be careful to ensure that we allocate an
4424 integral number of words. We do this below in the
4425 assign_stack_local if space was not allocated in the argument
4426 list. If it was, this will not work if PARM_BOUNDARY is not
4427 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4428 if it becomes a problem. */
4429
4430 if (stack_parm == 0)
4431 {
4432 stack_parm
4433 = assign_stack_local (GET_MODE (entry_parm),
4434 size_stored, 0);
4435
4436 /* If this is a memory ref that contains aggregate
4437 components, mark it as such for cse and loop optimize. */
4438 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4439 }
4440
4441 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4442 abort ();
4443
4444 if (TREE_READONLY (parm))
4445 RTX_UNCHANGING_P (stack_parm) = 1;
4446
4447 /* Handle calls that pass values in multiple non-contiguous
4448 locations. The Irix 6 ABI has examples of this. */
4449 if (GET_CODE (entry_parm) == PARALLEL)
4450 emit_group_store (validize_mem (stack_parm), entry_parm,
4451 int_size_in_bytes (TREE_TYPE (parm)),
4452 (TYPE_ALIGN (TREE_TYPE (parm))
4453 / BITS_PER_UNIT));
4454 else
4455 move_block_from_reg (REGNO (entry_parm),
4456 validize_mem (stack_parm),
4457 size_stored / UNITS_PER_WORD,
4458 int_size_in_bytes (TREE_TYPE (parm)));
4459 }
4460 DECL_RTL (parm) = stack_parm;
4461 }
4462 else if (! ((! optimize
4463 && ! DECL_REGISTER (parm)
4464 && ! DECL_INLINE (fndecl))
4465 /* layout_decl may set this. */
4466 || TREE_ADDRESSABLE (parm)
4467 || TREE_SIDE_EFFECTS (parm)
4468 /* If -ffloat-store specified, don't put explicit
4469 float variables into registers. */
4470 || (flag_float_store
4471 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4472 /* Always assign pseudo to structure return or item passed
4473 by invisible reference. */
4474 || passed_pointer || parm == function_result_decl)
4475 {
4476 /* Store the parm in a pseudoregister during the function, but we
4477 may need to do it in a wider mode. */
4478
4479 register rtx parmreg;
4480 int regno, regnoi = 0, regnor = 0;
4481
4482 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4483
4484 promoted_nominal_mode
4485 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4486
4487 parmreg = gen_reg_rtx (promoted_nominal_mode);
4488 mark_user_reg (parmreg);
4489
4490 /* If this was an item that we received a pointer to, set DECL_RTL
4491 appropriately. */
4492 if (passed_pointer)
4493 {
4494 DECL_RTL (parm)
4495 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4496 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4497 }
4498 else
4499 DECL_RTL (parm) = parmreg;
4500
4501 /* Copy the value into the register. */
4502 if (nominal_mode != passed_mode
4503 || promoted_nominal_mode != promoted_mode)
4504 {
4505 int save_tree_used;
4506 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4507 mode, by the caller. We now have to convert it to
4508 NOMINAL_MODE, if different. However, PARMREG may be in
4509 a different mode than NOMINAL_MODE if it is being stored
4510 promoted.
4511
4512 If ENTRY_PARM is a hard register, it might be in a register
4513 not valid for operating in its mode (e.g., an odd-numbered
4514 register for a DFmode). In that case, moves are the only
4515 thing valid, so we can't do a convert from there. This
4516             occurs when the calling sequence allows such misaligned
4517 usages.
4518
4519 In addition, the conversion may involve a call, which could
4520 clobber parameters which haven't been copied to pseudo
4521 registers yet. Therefore, we must first copy the parm to
4522 a pseudo reg here, and save the conversion until after all
4523 parameters have been moved. */
4524
4525 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4526
4527 emit_move_insn (tempreg, validize_mem (entry_parm));
4528
4529 push_to_sequence (conversion_insns);
4530 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4531
4532 /* TREE_USED gets set erroneously during expand_assignment. */
4533 save_tree_used = TREE_USED (parm);
4534 expand_assignment (parm,
4535 make_tree (nominal_type, tempreg), 0, 0);
4536 TREE_USED (parm) = save_tree_used;
4537 conversion_insns = get_insns ();
4538 did_conversion = 1;
4539 end_sequence ();
4540 }
4541 else
4542 emit_move_insn (parmreg, validize_mem (entry_parm));
4543
4544 /* If we were passed a pointer but the actual value
4545 can safely live in a register, put it in one. */
4546 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4547 && ! ((! optimize
4548 && ! DECL_REGISTER (parm)
4549 && ! DECL_INLINE (fndecl))
4550 /* layout_decl may set this. */
4551 || TREE_ADDRESSABLE (parm)
4552 || TREE_SIDE_EFFECTS (parm)
4553 /* If -ffloat-store specified, don't put explicit
4554 float variables into registers. */
4555 || (flag_float_store
4556 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4557 {
4558 /* We can't use nominal_mode, because it will have been set to
4559 Pmode above. We must use the actual mode of the parm. */
4560 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4561 mark_user_reg (parmreg);
4562 emit_move_insn (parmreg, DECL_RTL (parm));
4563 DECL_RTL (parm) = parmreg;
4564 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4565 now the parm. */
4566 stack_parm = 0;
4567 }
4568 #ifdef FUNCTION_ARG_CALLEE_COPIES
4569 /* If we are passed an arg by reference and it is our responsibility
4570 to make a copy, do it now.
4571         PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4572 original argument, so we must recreate them in the call to
4573 FUNCTION_ARG_CALLEE_COPIES. */
4574 /* ??? Later add code to handle the case that if the argument isn't
4575 modified, don't do the copy. */
4576
4577 else if (passed_pointer
4578 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4579 TYPE_MODE (DECL_ARG_TYPE (parm)),
4580 DECL_ARG_TYPE (parm),
4581 named_arg)
4582 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4583 {
4584 rtx copy;
4585 tree type = DECL_ARG_TYPE (parm);
4586
4587 /* This sequence may involve a library call perhaps clobbering
4588 registers that haven't been copied to pseudos yet. */
4589
4590 push_to_sequence (conversion_insns);
4591
4592 if (TYPE_SIZE (type) == 0
4593 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4594 /* This is a variable sized object. */
4595 copy = gen_rtx_MEM (BLKmode,
4596 allocate_dynamic_stack_space
4597 (expr_size (parm), NULL_RTX,
4598 TYPE_ALIGN (type)));
4599 else
4600 copy = assign_stack_temp (TYPE_MODE (type),
4601 int_size_in_bytes (type), 1);
4602 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4603 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4604
4605 store_expr (parm, copy, 0);
4606 emit_move_insn (parmreg, XEXP (copy, 0));
4607 if (current_function_check_memory_usage)
4608 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4609 XEXP (copy, 0), Pmode,
4610 GEN_INT (int_size_in_bytes (type)),
4611 TYPE_MODE (sizetype),
4612 GEN_INT (MEMORY_USE_RW),
4613 TYPE_MODE (integer_type_node));
4614 conversion_insns = get_insns ();
4615 did_conversion = 1;
4616 end_sequence ();
4617 }
4618 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4619
4620 /* In any case, record the parm's desired stack location
4621 in case we later discover it must live in the stack.
4622
4623 If it is a COMPLEX value, store the stack location for both
4624 halves. */
4625
4626 if (GET_CODE (parmreg) == CONCAT)
4627 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4628 else
4629 regno = REGNO (parmreg);
4630
4631 if (regno >= max_parm_reg)
4632 {
4633 rtx *new;
4634 int old_max_parm_reg = max_parm_reg;
4635
4636 /* It's slow to expand this one register at a time,
4637 but it's also rare and we need max_parm_reg to be
4638 precisely correct. */
4639 max_parm_reg = regno + 1;
4640 new = (rtx *) xrealloc (parm_reg_stack_loc,
4641 max_parm_reg * sizeof (rtx));
4642 bzero ((char *) (new + old_max_parm_reg),
4643 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4644 parm_reg_stack_loc = new;
4645 }
4646
4647 if (GET_CODE (parmreg) == CONCAT)
4648 {
4649 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4650
4651 regnor = REGNO (gen_realpart (submode, parmreg));
4652 regnoi = REGNO (gen_imagpart (submode, parmreg));
4653
4654 if (stack_parm != 0)
4655 {
4656 parm_reg_stack_loc[regnor]
4657 = gen_realpart (submode, stack_parm);
4658 parm_reg_stack_loc[regnoi]
4659 = gen_imagpart (submode, stack_parm);
4660 }
4661 else
4662 {
4663 parm_reg_stack_loc[regnor] = 0;
4664 parm_reg_stack_loc[regnoi] = 0;
4665 }
4666 }
4667 else
4668 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4669
4670 /* Mark the register as eliminable if we did no conversion
4671 and it was copied from memory at a fixed offset,
4672 and the arg pointer was not copied to a pseudo-reg.
4673 If the arg pointer is a pseudo reg or the offset formed
4674 an invalid address, such memory-equivalences
4675 as we make here would screw up life analysis for it. */
4676 if (nominal_mode == passed_mode
4677 && ! did_conversion
4678 && stack_parm != 0
4679 && GET_CODE (stack_parm) == MEM
4680 && stack_offset.var == 0
4681 && reg_mentioned_p (virtual_incoming_args_rtx,
4682 XEXP (stack_parm, 0)))
4683 {
4684 rtx linsn = get_last_insn ();
4685 rtx sinsn, set;
4686
4687 /* Mark complex types separately. */
4688 if (GET_CODE (parmreg) == CONCAT)
4689 /* Scan backwards for the set of the real and
4690 imaginary parts. */
4691 for (sinsn = linsn; sinsn != 0;
4692 sinsn = prev_nonnote_insn (sinsn))
4693 {
4694 set = single_set (sinsn);
4695 if (set != 0
4696 && SET_DEST (set) == regno_reg_rtx [regnoi])
4697 REG_NOTES (sinsn)
4698 = gen_rtx_EXPR_LIST (REG_EQUIV,
4699 parm_reg_stack_loc[regnoi],
4700 REG_NOTES (sinsn));
4701 else if (set != 0
4702 && SET_DEST (set) == regno_reg_rtx [regnor])
4703 REG_NOTES (sinsn)
4704 = gen_rtx_EXPR_LIST (REG_EQUIV,
4705 parm_reg_stack_loc[regnor],
4706 REG_NOTES (sinsn));
4707 }
4708 else if ((set = single_set (linsn)) != 0
4709 && SET_DEST (set) == parmreg)
4710 REG_NOTES (linsn)
4711 = gen_rtx_EXPR_LIST (REG_EQUIV,
4712 stack_parm, REG_NOTES (linsn));
4713 }
4714
4715 /* For pointer data type, suggest pointer register. */
4716 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4717 mark_reg_pointer (parmreg,
4718 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4719 / BITS_PER_UNIT));
4720 }
4721 else
4722 {
4723 /* Value must be stored in the stack slot STACK_PARM
4724 during function execution. */
4725
4726 if (promoted_mode != nominal_mode)
4727 {
4728 /* Conversion is required. */
4729 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4730
4731 emit_move_insn (tempreg, validize_mem (entry_parm));
4732
4733 push_to_sequence (conversion_insns);
4734 entry_parm = convert_to_mode (nominal_mode, tempreg,
4735 TREE_UNSIGNED (TREE_TYPE (parm)));
4736 if (stack_parm)
4737 {
4738 /* ??? This may need a big-endian conversion on sparc64. */
4739 stack_parm = change_address (stack_parm, nominal_mode,
4740 NULL_RTX);
4741 }
4742 conversion_insns = get_insns ();
4743 did_conversion = 1;
4744 end_sequence ();
4745 }
4746
4747 if (entry_parm != stack_parm)
4748 {
4749 if (stack_parm == 0)
4750 {
4751 stack_parm
4752 = assign_stack_local (GET_MODE (entry_parm),
4753 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4754 /* If this is a memory ref that contains aggregate components,
4755 mark it as such for cse and loop optimize. */
4756 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4757 }
4758
4759 if (promoted_mode != nominal_mode)
4760 {
4761 push_to_sequence (conversion_insns);
4762 emit_move_insn (validize_mem (stack_parm),
4763 validize_mem (entry_parm));
4764 conversion_insns = get_insns ();
4765 end_sequence ();
4766 }
4767 else
4768 emit_move_insn (validize_mem (stack_parm),
4769 validize_mem (entry_parm));
4770 }
4771 if (current_function_check_memory_usage)
4772 {
4773 push_to_sequence (conversion_insns);
4774 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4775 XEXP (stack_parm, 0), Pmode,
4776 GEN_INT (GET_MODE_SIZE (GET_MODE
4777 (entry_parm))),
4778 TYPE_MODE (sizetype),
4779 GEN_INT (MEMORY_USE_RW),
4780 TYPE_MODE (integer_type_node));
4781
4782 conversion_insns = get_insns ();
4783 end_sequence ();
4784 }
4785 DECL_RTL (parm) = stack_parm;
4786 }
4787
4788 /* If this "parameter" was the place where we are receiving the
4789 function's incoming structure pointer, set up the result. */
4790 if (parm == function_result_decl)
4791 {
4792 tree result = DECL_RESULT (fndecl);
4793 tree restype = TREE_TYPE (result);
4794
4795 DECL_RTL (result)
4796 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4797
4798 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4799 AGGREGATE_TYPE_P (restype));
4800 }
4801
4802 if (TREE_THIS_VOLATILE (parm))
4803 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4804 if (TREE_READONLY (parm))
4805 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4806 }
4807
4808 /* Output all parameter conversion instructions (possibly including calls)
4809 now that all parameters have been copied out of hard registers. */
4810 emit_insns (conversion_insns);
4811
4812 last_parm_insn = get_last_insn ();
4813
4814 current_function_args_size = stack_args_size.constant;
4815
4816 /* Adjust function incoming argument size for alignment and
4817 minimum length. */
4818
4819 #ifdef REG_PARM_STACK_SPACE
4820 #ifndef MAYBE_REG_PARM_STACK_SPACE
4821 current_function_args_size = MAX (current_function_args_size,
4822 REG_PARM_STACK_SPACE (fndecl));
4823 #endif
4824 #endif
4825
4826 #ifdef STACK_BOUNDARY
4827 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4828
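  /* Round the total size of the incoming arguments up to a multiple of the
     stack boundary, expressed in bytes.  */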
4829 current_function_args_size
4830 = ((current_function_args_size + STACK_BYTES - 1)
4831 / STACK_BYTES) * STACK_BYTES;
4832 #endif
4833
4834 #ifdef ARGS_GROW_DOWNWARD
4835 current_function_arg_offset_rtx
4836 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4837 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4838 size_int (-stack_args_size.constant)),
4839 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4840 #else
4841 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4842 #endif
4843
4844 /* See how many bytes, if any, of its args a function should try to pop
4845 on return. */
4846
4847 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4848 current_function_args_size);
4849
4850   /* For a stdarg.h function, save info about
4851 regs and stack space used by the named args. */
4852
4853 if (!hide_last_arg)
4854 current_function_args_info = args_so_far;
4855
4856 /* Set the rtx used for the function return value. Put this in its
4857 own variable so any optimizers that need this information don't have
4858 to include tree.h. Do this here so it gets done when an inlined
4859 function gets output. */
4860
4861 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4862 }
4863 \f
4864 /* Indicate whether REGNO is an incoming argument to the current function
4865 that was promoted to a wider mode. If so, return the RTX for the
4866 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4867 that REGNO is promoted from and whether the promotion was signed or
4868 unsigned. */
4869
4870 #ifdef PROMOTE_FUNCTION_ARGS
4871
4872 rtx
4873 promoted_input_arg (regno, pmode, punsignedp)
4874 int regno;
4875 enum machine_mode *pmode;
4876 int *punsignedp;
4877 {
4878 tree arg;
4879
4880 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4881 arg = TREE_CHAIN (arg))
4882 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4883 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4884 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4885 {
4886 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4887 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4888
4889 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4890 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4891 && mode != DECL_MODE (arg))
4892 {
4893 *pmode = DECL_MODE (arg);
4894 *punsignedp = unsignedp;
4895 return DECL_INCOMING_RTL (arg);
4896 }
4897 }
4898
4899 return 0;
4900 }
4901
4902 #endif
4903 \f
4904 /* Compute the size and offset from the start of the stacked arguments for a
4905 parm passed in mode PASSED_MODE and with type TYPE.
4906
4907 INITIAL_OFFSET_PTR points to the current offset into the stacked
4908 arguments.
4909
4910 The starting offset and size for this parm are returned in *OFFSET_PTR
4911 and *ARG_SIZE_PTR, respectively.
4912
4913 IN_REGS is non-zero if the argument will be passed in registers. It will
4914 never be set if REG_PARM_STACK_SPACE is not defined.
4915
4916 FNDECL is the function in which the argument was defined.
4917
4918 There are two types of rounding that are done. The first, controlled by
4919 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4920 list to be aligned to the specific boundary (in bits). This rounding
4921 affects the initial and starting offsets, but not the argument size.
4922
4923 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4924 optionally rounds the size of the parm to PARM_BOUNDARY. The
4925 initial offset is not affected by this rounding, while the size always
4926 is and the starting offset may be. */
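
/* As an illustration: on a target where PARM_BOUNDARY is 32 bits, a 6-byte
   argument has its size rounded up to 8 bytes by the second kind of
   rounding, while its starting offset is aligned only as
   FUNCTION_ARG_BOUNDARY directs.  */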
4927
4928 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4929    initial_offset_ptr is positive because locate_and_pad_parm's
4930    callers pass in the total size of args so far as
4931    initial_offset_ptr.  arg_size_ptr is always positive.  */
4932
4933 void
4934 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4935 initial_offset_ptr, offset_ptr, arg_size_ptr,
4936 alignment_pad)
4937 enum machine_mode passed_mode;
4938 tree type;
4939 int in_regs ATTRIBUTE_UNUSED;
4940 tree fndecl ATTRIBUTE_UNUSED;
4941 struct args_size *initial_offset_ptr;
4942 struct args_size *offset_ptr;
4943 struct args_size *arg_size_ptr;
4944 struct args_size *alignment_pad;
4945
4946 {
4947 tree sizetree
4948 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4949 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4950 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4951
4952 #ifdef REG_PARM_STACK_SPACE
4953 /* If we have found a stack parm before we reach the end of the
4954 area reserved for registers, skip that area. */
4955 if (! in_regs)
4956 {
4957 int reg_parm_stack_space = 0;
4958
4959 #ifdef MAYBE_REG_PARM_STACK_SPACE
4960 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4961 #else
4962 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4963 #endif
4964 if (reg_parm_stack_space > 0)
4965 {
4966 if (initial_offset_ptr->var)
4967 {
4968 initial_offset_ptr->var
4969 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4970 size_int (reg_parm_stack_space));
4971 initial_offset_ptr->constant = 0;
4972 }
4973 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4974 initial_offset_ptr->constant = reg_parm_stack_space;
4975 }
4976 }
4977 #endif /* REG_PARM_STACK_SPACE */
4978
4979 arg_size_ptr->var = 0;
4980 arg_size_ptr->constant = 0;
4981
4982 #ifdef ARGS_GROW_DOWNWARD
4983 if (initial_offset_ptr->var)
4984 {
4985 offset_ptr->constant = 0;
4986 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4987 initial_offset_ptr->var);
4988 }
4989 else
4990 {
4991 offset_ptr->constant = - initial_offset_ptr->constant;
4992 offset_ptr->var = 0;
4993 }
4994 if (where_pad != none
4995 && (TREE_CODE (sizetree) != INTEGER_CST
4996 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4997 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4998 SUB_PARM_SIZE (*offset_ptr, sizetree);
4999 if (where_pad != downward)
5000 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5001 if (initial_offset_ptr->var)
5002 {
5003 arg_size_ptr->var = size_binop (MINUS_EXPR,
5004 size_binop (MINUS_EXPR,
5005 integer_zero_node,
5006 initial_offset_ptr->var),
5007 offset_ptr->var);
5008 }
5009 else
5010 {
5011 arg_size_ptr->constant = (- initial_offset_ptr->constant
5012 - offset_ptr->constant);
5013 }
5014 #else /* !ARGS_GROW_DOWNWARD */
5015 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5016 *offset_ptr = *initial_offset_ptr;
5017
5018 #ifdef PUSH_ROUNDING
5019 if (passed_mode != BLKmode)
5020 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5021 #endif
5022
5023 /* Pad_below needs the pre-rounded size to know how much to pad below
5024 so this must be done before rounding up. */
5025 if (where_pad == downward
5026 /* However, BLKmode args passed in regs have their padding done elsewhere.
5027 The stack slot must be able to hold the entire register. */
5028 && !(in_regs && passed_mode == BLKmode))
5029 pad_below (offset_ptr, passed_mode, sizetree);
5030
5031 if (where_pad != none
5032 && (TREE_CODE (sizetree) != INTEGER_CST
5033 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5034 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5035
5036 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5037 #endif /* ARGS_GROW_DOWNWARD */
5038 }
5039
5040 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5041 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
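
/* For example, with a 64-bit BOUNDARY a constant offset of 9 is rounded up
   to 16; when ARGS_GROW_DOWNWARD the offset is instead rounded down, to 8.  */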
5042
5043 static void
5044 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5045 struct args_size *offset_ptr;
5046 int boundary;
5047 struct args_size *alignment_pad;
5048 {
5049 tree save_var = NULL_TREE;
5050 HOST_WIDE_INT save_constant = 0;
5051
5052 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5053
5054 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5055 {
5056 save_var = offset_ptr->var;
5057 save_constant = offset_ptr->constant;
5058 }
5059
5060 alignment_pad->var = NULL_TREE;
5061 alignment_pad->constant = 0;
5062
5063 if (boundary > BITS_PER_UNIT)
5064 {
5065 if (offset_ptr->var)
5066 {
5067 offset_ptr->var =
5068 #ifdef ARGS_GROW_DOWNWARD
5069 round_down
5070 #else
5071 round_up
5072 #endif
5073 (ARGS_SIZE_TREE (*offset_ptr),
5074 boundary / BITS_PER_UNIT);
5075 offset_ptr->constant = 0; /*?*/
5076 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5077 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5078 }
5079 else
5080 {
5081 offset_ptr->constant =
5082 #ifdef ARGS_GROW_DOWNWARD
5083 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5084 #else
5085 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5086 #endif
5087 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5088 alignment_pad->constant = offset_ptr->constant - save_constant;
5089 }
5090 }
5091 }
5092
5093 #ifndef ARGS_GROW_DOWNWARD
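/* Adjust the stack offset in *OFFSET_PTR to skip the padding placed below
   an argument of mode PASSED_MODE and size SIZETREE when that argument is
   padded downward on the stack.  */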
5094 static void
5095 pad_below (offset_ptr, passed_mode, sizetree)
5096 struct args_size *offset_ptr;
5097 enum machine_mode passed_mode;
5098 tree sizetree;
5099 {
5100 if (passed_mode != BLKmode)
5101 {
5102 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5103 offset_ptr->constant
5104 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5105 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5106 - GET_MODE_SIZE (passed_mode));
5107 }
5108 else
5109 {
5110 if (TREE_CODE (sizetree) != INTEGER_CST
5111 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5112 {
5113           /* Round the size up to a multiple of PARM_BOUNDARY bits.  */
5114 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5115 /* Add it in. */
5116 ADD_PARM_SIZE (*offset_ptr, s2);
5117 SUB_PARM_SIZE (*offset_ptr, sizetree);
5118 }
5119 }
5120 }
5121 #endif
5122
5123 #ifdef ARGS_GROW_DOWNWARD
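/* Return VALUE, a size tree, rounded down to the nearest multiple of
   DIVISOR.  Needed only when arguments grow downward.  */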
5124 static tree
5125 round_down (value, divisor)
5126 tree value;
5127 int divisor;
5128 {
5129 return size_binop (MULT_EXPR,
5130 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5131 size_int (divisor));
5132 }
5133 #endif
5134 \f
5135 /* Walk the tree of blocks describing the binding levels within a function
5136 and warn about uninitialized variables.
5137 This is done after calling flow_analysis and before global_alloc
5138 clobbers the pseudo-regs to hard regs. */
5139
5140 void
5141 uninitialized_vars_warning (block)
5142 tree block;
5143 {
5144 register tree decl, sub;
5145 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5146 {
5147 if (warn_uninitialized
5148 && TREE_CODE (decl) == VAR_DECL
5149          /* These warnings are unreliable for aggregates
5150 because assigning the fields one by one can fail to convince
5151 flow.c that the entire aggregate was initialized.
5152 Unions are troublesome because members may be shorter. */
5153 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5154 && DECL_RTL (decl) != 0
5155 && GET_CODE (DECL_RTL (decl)) == REG
5156 /* Global optimizations can make it difficult to determine if a
5157 particular variable has been initialized. However, a VAR_DECL
5158 with a nonzero DECL_INITIAL had an initializer, so do not
5159 claim it is potentially uninitialized.
5160
5161 We do not care about the actual value in DECL_INITIAL, so we do
5162 not worry that it may be a dangling pointer. */
5163 && DECL_INITIAL (decl) == NULL_TREE
5164 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5165 warning_with_decl (decl,
5166 "`%s' might be used uninitialized in this function");
5167 if (extra_warnings
5168 && TREE_CODE (decl) == VAR_DECL
5169 && DECL_RTL (decl) != 0
5170 && GET_CODE (DECL_RTL (decl)) == REG
5171 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5172 warning_with_decl (decl,
5173 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5174 }
5175 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5176 uninitialized_vars_warning (sub);
5177 }
5178
5179 /* Do the appropriate part of uninitialized_vars_warning
5180 but for arguments instead of local variables. */
5181
5182 void
5183 setjmp_args_warning ()
5184 {
5185 register tree decl;
5186 for (decl = DECL_ARGUMENTS (current_function_decl);
5187 decl; decl = TREE_CHAIN (decl))
5188 if (DECL_RTL (decl) != 0
5189 && GET_CODE (DECL_RTL (decl)) == REG
5190 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5191 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5192 }
5193
5194 /* If this function calls setjmp, put all vars into the stack
5195 unless they were declared `register'. */
5196
5197 void
5198 setjmp_protect (block)
5199 tree block;
5200 {
5201 register tree decl, sub;
5202 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5203 if ((TREE_CODE (decl) == VAR_DECL
5204 || TREE_CODE (decl) == PARM_DECL)
5205 && DECL_RTL (decl) != 0
5206 && (GET_CODE (DECL_RTL (decl)) == REG
5207 || (GET_CODE (DECL_RTL (decl)) == MEM
5208 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5209 /* If this variable came from an inline function, it must be
5210 that its life doesn't overlap the setjmp. If there was a
5211 setjmp in the function, it would already be in memory. We
5212 must exclude such variables because their DECL_RTL might be
5213 set to strange things such as virtual_stack_vars_rtx. */
5214 && ! DECL_FROM_INLINE (decl)
5215 && (
5216 #ifdef NON_SAVING_SETJMP
5217 /* If longjmp doesn't restore the registers,
5218 don't put anything in them. */
5219 NON_SAVING_SETJMP
5220 ||
5221 #endif
5222 ! DECL_REGISTER (decl)))
5223 put_var_into_stack (decl);
5224 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5225 setjmp_protect (sub);
5226 }
5227 \f
5228 /* Like the previous function, but for args instead of local variables. */
5229
5230 void
5231 setjmp_protect_args ()
5232 {
5233 register tree decl;
5234 for (decl = DECL_ARGUMENTS (current_function_decl);
5235 decl; decl = TREE_CHAIN (decl))
5236 if ((TREE_CODE (decl) == VAR_DECL
5237 || TREE_CODE (decl) == PARM_DECL)
5238 && DECL_RTL (decl) != 0
5239 && (GET_CODE (DECL_RTL (decl)) == REG
5240 || (GET_CODE (DECL_RTL (decl)) == MEM
5241 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5242 && (
5243 /* If longjmp doesn't restore the registers,
5244 don't put anything in them. */
5245 #ifdef NON_SAVING_SETJMP
5246 NON_SAVING_SETJMP
5247 ||
5248 #endif
5249 ! DECL_REGISTER (decl)))
5250 put_var_into_stack (decl);
5251 }
5252 \f
5253 /* Return the context-pointer register corresponding to DECL,
5254 or 0 if it does not need one. */
5255
5256 rtx
5257 lookup_static_chain (decl)
5258 tree decl;
5259 {
5260 tree context = decl_function_context (decl);
5261 tree link;
5262
5263 if (context == 0
5264 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5265 return 0;
5266
5267 /* We treat inline_function_decl as an alias for the current function
5268 because that is the inline function whose vars, types, etc.
5269 are being merged into the current function.
5270 See expand_inline_function. */
5271 if (context == current_function_decl || context == inline_function_decl)
5272 return virtual_stack_vars_rtx;
5273
5274 for (link = context_display; link; link = TREE_CHAIN (link))
5275 if (TREE_PURPOSE (link) == context)
5276 return RTL_EXPR_RTL (TREE_VALUE (link));
5277
5278 abort ();
5279 }
5280 \f
5281 /* Convert a stack slot address ADDR for variable VAR
5282 (from a containing function)
5283 into an address valid in this function (using a static chain). */
5284
5285 rtx
5286 fix_lexical_addr (addr, var)
5287 rtx addr;
5288 tree var;
5289 {
5290 rtx basereg;
5291 HOST_WIDE_INT displacement;
5292 tree context = decl_function_context (var);
5293 struct function *fp;
5294 rtx base = 0;
5295
5296 /* If this is the present function, we need not do anything. */
5297 if (context == current_function_decl || context == inline_function_decl)
5298 return addr;
5299
5300 for (fp = outer_function_chain; fp; fp = fp->next)
5301 if (fp->decl == context)
5302 break;
5303
5304 if (fp == 0)
5305 abort ();
5306
5307 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5308 addr = XEXP (XEXP (addr, 0), 0);
5309
5310 /* Decode given address as base reg plus displacement. */
5311 if (GET_CODE (addr) == REG)
5312 basereg = addr, displacement = 0;
5313 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5314 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5315 else
5316 abort ();
5317
5318 /* We accept vars reached via the containing function's
5319 incoming arg pointer and via its stack variables pointer. */
5320 if (basereg == fp->internal_arg_pointer)
5321 {
5322 /* If reached via arg pointer, get the arg pointer value
5323 out of that function's stack frame.
5324
5325 There are two cases: If a separate ap is needed, allocate a
5326 slot in the outer function for it and dereference it that way.
5327 This is correct even if the real ap is actually a pseudo.
5328 Otherwise, just adjust the offset from the frame pointer to
5329 compensate. */
5330
5331 #ifdef NEED_SEPARATE_AP
5332 rtx addr;
5333
5334 if (fp->x_arg_pointer_save_area == 0)
5335 fp->x_arg_pointer_save_area
5336 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5337
5338 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5339 addr = memory_address (Pmode, addr);
5340
5341 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5342 #else
5343 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5344 base = lookup_static_chain (var);
5345 #endif
5346 }
5347
5348 else if (basereg == virtual_stack_vars_rtx)
5349 {
5350 /* This is the same code as lookup_static_chain, duplicated here to
5351 avoid an extra call to decl_function_context. */
5352 tree link;
5353
5354 for (link = context_display; link; link = TREE_CHAIN (link))
5355 if (TREE_PURPOSE (link) == context)
5356 {
5357 base = RTL_EXPR_RTL (TREE_VALUE (link));
5358 break;
5359 }
5360 }
5361
5362 if (base == 0)
5363 abort ();
5364
5365 /* Use same offset, relative to appropriate static chain or argument
5366 pointer. */
5367 return plus_constant (base, displacement);
5368 }
5369 \f
5370 /* Return the address of the trampoline for entering nested fn FUNCTION.
5371 If necessary, allocate a trampoline (in the stack frame)
5372 and emit rtl to initialize its contents (at entry to this function). */
5373
5374 rtx
5375 trampoline_address (function)
5376 tree function;
5377 {
5378 tree link;
5379 tree rtlexp;
5380 rtx tramp;
5381 struct function *fp;
5382 tree fn_context;
5383
5384 /* Find an existing trampoline and return it. */
5385 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5386 if (TREE_PURPOSE (link) == function)
5387 return
5388 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5389
5390 for (fp = outer_function_chain; fp; fp = fp->next)
5391 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5392 if (TREE_PURPOSE (link) == function)
5393 {
5394 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5395 function);
5396 return round_trampoline_addr (tramp);
5397 }
5398
5399 /* None exists; we must make one. */
5400
5401 /* Find the `struct function' for the function containing FUNCTION. */
5402 fp = 0;
5403 fn_context = decl_function_context (function);
5404 if (fn_context != current_function_decl
5405 && fn_context != inline_function_decl)
5406 for (fp = outer_function_chain; fp; fp = fp->next)
5407 if (fp->decl == fn_context)
5408 break;
5409
5410 /* Allocate run-time space for this trampoline
5411 (usually in the defining function's stack frame). */
5412 #ifdef ALLOCATE_TRAMPOLINE
5413 tramp = ALLOCATE_TRAMPOLINE (fp);
5414 #else
5415 /* If rounding needed, allocate extra space
5416 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5417 #ifdef TRAMPOLINE_ALIGNMENT
5418 #define TRAMPOLINE_REAL_SIZE \
5419 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5420 #else
5421 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5422 #endif
5423 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5424 fp ? fp : cfun);
5425 #endif
5426
5427 /* Record the trampoline for reuse and note it for later initialization
5428 by expand_function_end. */
5429 if (fp != 0)
5430 {
5431 push_obstacks (fp->function_maybepermanent_obstack,
5432 fp->function_maybepermanent_obstack);
5433 rtlexp = make_node (RTL_EXPR);
5434 RTL_EXPR_RTL (rtlexp) = tramp;
5435 fp->x_trampoline_list = tree_cons (function, rtlexp,
5436 fp->x_trampoline_list);
5437 pop_obstacks ();
5438 }
5439 else
5440 {
5441 /* Make the RTL_EXPR node temporary, not momentary, so that the
5442 trampoline_list doesn't become garbage. */
5443 int momentary = suspend_momentary ();
5444 rtlexp = make_node (RTL_EXPR);
5445 resume_momentary (momentary);
5446
5447 RTL_EXPR_RTL (rtlexp) = tramp;
5448 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5449 }
5450
5451 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5452 return round_trampoline_addr (tramp);
5453 }
5454
5455 /* Given a trampoline address,
5456 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5457
5458 static rtx
5459 round_trampoline_addr (tramp)
5460 rtx tramp;
5461 {
5462 #ifdef TRAMPOLINE_ALIGNMENT
5463 /* Round address up to desired boundary. */
5464 rtx temp = gen_reg_rtx (Pmode);
5465 temp = expand_binop (Pmode, add_optab, tramp,
5466 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5467 temp, 0, OPTAB_LIB_WIDEN);
5468 tramp = expand_binop (Pmode, and_optab, temp,
5469 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5470 temp, 0, OPTAB_LIB_WIDEN);
5471 #endif
5472 return tramp;
5473 }
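/* For illustration, assuming TRAMPOLINE_ALIGNMENT is 64 bits (8 bytes):
   the add/and sequence above computes (tramp + 7) & -8, so a trampoline
   at address 0x1005 would be rounded up to 0x1008.  */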
5474 \f
5475 /* The functions identify_blocks and reorder_blocks provide a way to
5476 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5477 duplicate portions of the RTL code. Call identify_blocks before
5478 changing the RTL, and call reorder_blocks after. */
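/* A minimal usage sketch for a hypothetical pass that reshuffles insns,
   where `block' is the function's outermost BLOCK and `insns' its insn
   chain:

       identify_blocks (block, insns);
       ... transform the RTL, possibly duplicating block notes ...
       block = reorder_blocks (block, insns);
*/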
5479
5480 /* Gather all this function's BLOCK nodes, including those that are chained
5481 onto the first block, into a vector in depth-first order.
5482 Also store in each NOTE for the beginning or end of a block
5483 the BLOCK node that the note corresponds to.
5484 The arguments are BLOCK, the chain of top-level blocks of the function,
5485 and INSNS, the insn chain of the function. */
5486
5487 void
5488 identify_blocks (block, insns)
5489 tree block;
5490 rtx insns;
5491 {
5492 int n_blocks;
5493 tree *block_vector;
5494 tree *block_stack;
5495 int depth = 0;
5496 int current_block_number = 1;
5497 rtx insn;
5498
5499 if (block == 0)
5500 return;
5501
5502 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5503 depth-first order. */
5504 n_blocks = all_blocks (block, 0);
5505 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5506 all_blocks (block, block_vector);
5507
5508 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5509
5510 for (insn = insns; insn; insn = NEXT_INSN (insn))
5511 if (GET_CODE (insn) == NOTE)
5512 {
5513 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5514 {
5515 tree b;
5516
5517 /* If there are more block notes than BLOCKs, something
5518 is badly wrong. */
5519 if (current_block_number == n_blocks)
5520 abort ();
5521
5522 b = block_vector[current_block_number++];
5523 NOTE_BLOCK (insn) = b;
5524 block_stack[depth++] = b;
5525 }
5526 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5527 {
5528 if (depth == 0)
5529 /* There are more NOTE_INSN_BLOCK_ENDs than
5530 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5531 abort ();
5532
5533 NOTE_BLOCK (insn) = block_stack[--depth];
5534 }
5535 }
5536
5537 /* In whole-function mode, we might not have seen the whole function
5538 yet, so we might not use up all the blocks. */
5539 if (n_blocks != current_block_number
5540 && !cfun->x_whole_function_mode_p)
5541 abort ();
5542
5543 free (block_vector);
5544 free (block_stack);
5545 }
5546
5547 /* Given a revised instruction chain, rebuild the tree structure of
5548 BLOCK nodes to correspond to the new order of RTL. The new block
5549 tree is inserted below BLOCK. Returns the current top-level
5550 block. */
5551
5552 tree
5553 reorder_blocks (block, insns)
5554 tree block;
5555 rtx insns;
5556 {
5557 tree current_block = block;
5558 rtx insn;
5559
5560 if (block == NULL_TREE)
5561 return NULL_TREE;
5562
5563 /* Prune the old trees away, so that they don't get in the way. */
5564 BLOCK_SUBBLOCKS (current_block) = 0;
5565 BLOCK_CHAIN (current_block) = 0;
5566
5567 for (insn = insns; insn; insn = NEXT_INSN (insn))
5568 if (GET_CODE (insn) == NOTE)
5569 {
5570 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5571 {
5572 tree block = NOTE_BLOCK (insn);
5573 /* If we have seen this block before, copy it. */
5574 if (TREE_ASM_WRITTEN (block))
5575 block = copy_node (block);
5576 BLOCK_SUBBLOCKS (block) = 0;
5577 TREE_ASM_WRITTEN (block) = 1;
5578 BLOCK_SUPERCONTEXT (block) = current_block;
5579 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5580 BLOCK_SUBBLOCKS (current_block) = block;
5581 current_block = block;
5582 }
5583 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5584 {
5585 BLOCK_SUBBLOCKS (current_block)
5586 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5587 current_block = BLOCK_SUPERCONTEXT (current_block);
5588 }
5589 }
5590
5591 BLOCK_SUBBLOCKS (current_block)
5592 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5593 return current_block;
5594 }
5595
5596 /* Reverse the order of elements in the chain T of blocks,
5597 and return the new head of the chain (old last element). */
5598
5599 static tree
5600 blocks_nreverse (t)
5601 tree t;
5602 {
5603 register tree prev = 0, decl, next;
5604 for (decl = t; decl; decl = next)
5605 {
5606 next = BLOCK_CHAIN (decl);
5607 BLOCK_CHAIN (decl) = prev;
5608 prev = decl;
5609 }
5610 return prev;
5611 }
5612
5613 /* Count the subblocks of the list starting with BLOCK, and list them
5614 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5615 blocks. */
5616
5617 static int
5618 all_blocks (block, vector)
5619 tree block;
5620 tree *vector;
5621 {
5622 int n_blocks = 0;
5623
5624 while (block)
5625 {
5626 TREE_ASM_WRITTEN (block) = 0;
5627
5628 /* Record this block. */
5629 if (vector)
5630 vector[n_blocks] = block;
5631
5632 ++n_blocks;
5633
5634 /* Record the subblocks, and their subblocks... */
5635 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5636 vector ? vector + n_blocks : 0);
5637 block = BLOCK_CHAIN (block);
5638 }
5639
5640 return n_blocks;
5641 }
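/* For illustration: given a top-level block A whose subblock chain is
   B followed by C, and where B itself has a single subblock D, the
   vector is filled in depth-first order A, B, D, C and 4 is returned.  */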
5642 \f
5643 /* Allocate a function structure and reset its contents to the defaults. */
5644 static void
5645 prepare_function_start ()
5646 {
5647 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5648
5649 init_stmt_for_function ();
5650 init_eh_for_function ();
5651
5652 cse_not_expected = ! optimize;
5653
5654 /* Caller save not needed yet. */
5655 caller_save_needed = 0;
5656
5657 /* No stack slots have been made yet. */
5658 stack_slot_list = 0;
5659
5660 current_function_has_nonlocal_label = 0;
5661 current_function_has_nonlocal_goto = 0;
5662
5663 /* There is no stack slot for handling nonlocal gotos. */
5664 nonlocal_goto_handler_slots = 0;
5665 nonlocal_goto_stack_level = 0;
5666
5667 /* No labels have been declared for nonlocal use. */
5668 nonlocal_labels = 0;
5669 nonlocal_goto_handler_labels = 0;
5670
5671 /* No function calls so far in this function. */
5672 function_call_count = 0;
5673
5674 /* No parm regs have been allocated.
5675 (This is important for output_inline_function.) */
5676 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5677
5678 /* Initialize the RTL mechanism. */
5679 init_emit ();
5680
5681 /* Initialize the queue of pending postincrement and postdecrements,
5682 and some other info in expr.c. */
5683 init_expr ();
5684
5685 /* We haven't done register allocation yet. */
5686 reg_renumber = 0;
5687
5688 init_varasm_status (cfun);
5689
5690 /* Clear out data used for inlining. */
5691 cfun->inlinable = 0;
5692 cfun->original_decl_initial = 0;
5693 cfun->original_arg_vector = 0;
5694
5695 cfun->stack_alignment_needed = 0;
5696 #ifdef STACK_BOUNDARY
5697 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5698 #endif
5699
5700 /* Set if a call to setjmp is seen. */
5701 current_function_calls_setjmp = 0;
5702
5703 /* Set if a call to longjmp is seen. */
5704 current_function_calls_longjmp = 0;
5705
5706 current_function_calls_alloca = 0;
5707 current_function_contains_functions = 0;
5708 current_function_is_leaf = 0;
5709 current_function_sp_is_unchanging = 0;
5710 current_function_uses_only_leaf_regs = 0;
5711 current_function_has_computed_jump = 0;
5712 current_function_is_thunk = 0;
5713
5714 current_function_returns_pcc_struct = 0;
5715 current_function_returns_struct = 0;
5716 current_function_epilogue_delay_list = 0;
5717 current_function_uses_const_pool = 0;
5718 current_function_uses_pic_offset_table = 0;
5719 current_function_cannot_inline = 0;
5720
5721 /* We have not yet needed to make a label to jump to for tail-recursion. */
5722 tail_recursion_label = 0;
5723
5724 /* We haven't had a need to make a save area for ap yet. */
5725 arg_pointer_save_area = 0;
5726
5727 /* No stack slots allocated yet. */
5728 frame_offset = 0;
5729
5730 /* No SAVE_EXPRs in this function yet. */
5731 save_expr_regs = 0;
5732
5733 /* No RTL_EXPRs in this function yet. */
5734 rtl_expr_chain = 0;
5735
5736 /* Set up to allocate temporaries. */
5737 init_temp_slots ();
5738
5739 /* Indicate that we need to distinguish between the return value of the
5740 present function and the return value of a function being called. */
5741 rtx_equal_function_value_matters = 1;
5742
5743 /* Indicate that we have not instantiated virtual registers yet. */
5744 virtuals_instantiated = 0;
5745
5746 /* Indicate we have no need of a frame pointer yet. */
5747 frame_pointer_needed = 0;
5748
5749 /* By default assume not varargs or stdarg. */
5750 current_function_varargs = 0;
5751 current_function_stdarg = 0;
5752
5753 /* We haven't made any trampolines for this function yet. */
5754 trampoline_list = 0;
5755
5756 init_pending_stack_adjust ();
5757 inhibit_defer_pop = 0;
5758
5759 current_function_outgoing_args_size = 0;
5760
5761 if (init_lang_status)
5762 (*init_lang_status) (cfun);
5763 if (init_machine_status)
5764 (*init_machine_status) (cfun);
5765 }
5766
5767 /* Initialize the rtl expansion mechanism so that we can do simple things
5768 like generate sequences. This is used to provide a context during global
5769 initialization of some passes. */
5770 void
5771 init_dummy_function_start ()
5772 {
5773 prepare_function_start ();
5774 }
5775
5776 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5777 and initialize static variables for generating RTL for the statements
5778 of the function. */
5779
5780 void
5781 init_function_start (subr, filename, line)
5782 tree subr;
5783 char *filename;
5784 int line;
5785 {
5786 prepare_function_start ();
5787
5788 /* Remember this function for later. */
5789 cfun->next_global = all_functions;
5790 all_functions = cfun;
5791
5792 current_function_name = (*decl_printable_name) (subr, 2);
5793 cfun->decl = subr;
5794
5795 /* Nonzero if this is a nested function that uses a static chain. */
5796
5797 current_function_needs_context
5798 = (decl_function_context (current_function_decl) != 0
5799 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5800
5801 /* Within function body, compute a type's size as soon as it is laid out. */
5802 immediate_size_expand++;
5803
5804 /* Prevent ever trying to delete the first instruction of a function.
5805 Also tell final how to output a linenum before the function prologue.
5806 Note linenums could be missing, e.g. when compiling a Java .class file. */
5807 if (line > 0)
5808 emit_line_note (filename, line);
5809
5810 /* Make sure first insn is a note even if we don't want linenums.
5811 This makes sure the first insn will never be deleted.
5812 Also, final expects a note to appear there. */
5813 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5814
5815 /* Set flags used by final.c. */
5816 if (aggregate_value_p (DECL_RESULT (subr)))
5817 {
5818 #ifdef PCC_STATIC_STRUCT_RETURN
5819 current_function_returns_pcc_struct = 1;
5820 #endif
5821 current_function_returns_struct = 1;
5822 }
5823
5824 /* Warn if this value is an aggregate type,
5825 regardless of which calling convention we are using for it. */
5826 if (warn_aggregate_return
5827 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5828 warning ("function returns an aggregate");
5829
5830 current_function_returns_pointer
5831 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5832 }
5833
5834 /* Make sure all values used by the optimization passes have sane
5835 defaults. */
5836 void
5837 init_function_for_compilation ()
5838 {
5839 reg_renumber = 0;
5840 /* No prologue/epilogue insns yet. */
5841 prologue = epilogue = 0;
5842 }
5843
5844 /* Indicate that the current function uses extra args
5845 not explicitly mentioned in the argument list in any fashion. */
5846
5847 void
5848 mark_varargs ()
5849 {
5850 current_function_varargs = 1;
5851 }
5852
5853 /* Expand a call to __main at the beginning of a possible main function. */
5854
5855 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5856 #undef HAS_INIT_SECTION
5857 #define HAS_INIT_SECTION
5858 #endif
5859
5860 void
5861 expand_main_function ()
5862 {
5863 #if !defined (HAS_INIT_SECTION)
5864 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5865 VOIDmode, 0);
5866 #endif /* not HAS_INIT_SECTION */
5867 }
5868 \f
5869 extern struct obstack permanent_obstack;
5870
5871 /* Start the RTL for a new function, and set variables used for
5872 emitting RTL.
5873 SUBR is the FUNCTION_DECL node.
5874 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5875 the function's parameters, which must be run at any return statement. */
5876
5877 void
5878 expand_function_start (subr, parms_have_cleanups)
5879 tree subr;
5880 int parms_have_cleanups;
5881 {
5882 tree tem;
5883 rtx last_ptr = NULL_RTX;
5884
5885 /* Make sure volatile mem refs aren't considered
5886 valid operands of arithmetic insns. */
5887 init_recog_no_volatile ();
5888
5889 /* Set this before generating any memory accesses. */
5890 current_function_check_memory_usage
5891 = (flag_check_memory_usage
5892 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5893
5894 current_function_instrument_entry_exit
5895 = (flag_instrument_function_entry_exit
5896 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5897
5898 current_function_limit_stack
5899 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5900
5901 /* If function gets a static chain arg, store it in the stack frame.
5902 Do this first, so it gets the first stack slot offset. */
5903 if (current_function_needs_context)
5904 {
5905 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5906
5907 /* Delay copying static chain if it is not a register to avoid
5908 conflicts with regs used for parameters. */
5909 if (! SMALL_REGISTER_CLASSES
5910 || GET_CODE (static_chain_incoming_rtx) == REG)
5911 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5912 }
5913
5914 /* If the parameters of this function need cleaning up, get a label
5915 for the beginning of the code which executes those cleanups. This must
5916 be done before doing anything with return_label. */
5917 if (parms_have_cleanups)
5918 cleanup_label = gen_label_rtx ();
5919 else
5920 cleanup_label = 0;
5921
5922 /* Make the label for return statements to jump to, if this machine
5923 does not have a one-instruction return and uses an epilogue,
5924 or if it returns a structure, or if it has parm cleanups. */
5925 #ifdef HAVE_return
5926 if (cleanup_label == 0 && HAVE_return
5927 && ! current_function_instrument_entry_exit
5928 && ! current_function_returns_pcc_struct
5929 && ! (current_function_returns_struct && ! optimize))
5930 return_label = 0;
5931 else
5932 return_label = gen_label_rtx ();
5933 #else
5934 return_label = gen_label_rtx ();
5935 #endif
5936
5937 /* Initialize rtx used to return the value. */
5938 /* Do this before assign_parms so that we copy the struct value address
5939 before any library calls that assign parms might generate. */
5940
5941 /* Decide whether to return the value in memory or in a register. */
5942 if (aggregate_value_p (DECL_RESULT (subr)))
5943 {
5944 /* Returning something that won't go in a register. */
5945 register rtx value_address = 0;
5946
5947 #ifdef PCC_STATIC_STRUCT_RETURN
5948 if (current_function_returns_pcc_struct)
5949 {
5950 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5951 value_address = assemble_static_space (size);
5952 }
5953 else
5954 #endif
5955 {
5956 /* Expect to be passed the address of a place to store the value.
5957 If it is passed as an argument, assign_parms will take care of
5958 it. */
5959 if (struct_value_incoming_rtx)
5960 {
5961 value_address = gen_reg_rtx (Pmode);
5962 emit_move_insn (value_address, struct_value_incoming_rtx);
5963 }
5964 }
5965 if (value_address)
5966 {
5967 DECL_RTL (DECL_RESULT (subr))
5968 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5969 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5970 AGGREGATE_TYPE_P (TREE_TYPE
5971 (DECL_RESULT
5972 (subr))));
5973 }
5974 }
5975 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5976 /* If return mode is void, this decl rtl should not be used. */
5977 DECL_RTL (DECL_RESULT (subr)) = 0;
5978 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5979 {
5980 /* If function will end with cleanup code for parms,
5981 compute the return values into a pseudo reg,
5982 which we will copy into the true return register
5983 after the cleanups are done. */
5984
5985 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5986
5987 #ifdef PROMOTE_FUNCTION_RETURN
5988 tree type = TREE_TYPE (DECL_RESULT (subr));
5989 int unsignedp = TREE_UNSIGNED (type);
5990
5991 mode = promote_mode (type, mode, &unsignedp, 1);
5992 #endif
5993
5994 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5995 }
5996 else
5997 /* Scalar, returned in a register. */
5998 {
5999 #ifdef FUNCTION_OUTGOING_VALUE
6000 DECL_RTL (DECL_RESULT (subr))
6001 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6002 #else
6003 DECL_RTL (DECL_RESULT (subr))
6004 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
6005 #endif
6006
6007 /* Mark this reg as the function's return value. */
6008 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6009 {
6010 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6011 /* Needed because we may need to move this to memory
6012 in case it's a named return value whose address is taken. */
6013 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6014 }
6015 }
6016
6017 /* Initialize rtx for parameters and local variables.
6018 In some cases this requires emitting insns. */
6019
6020 assign_parms (subr);
6021
6022 /* Copy the static chain now if it wasn't a register. The delay is to
6023 avoid conflicts with the parameter passing registers. */
6024
6025 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6026 if (GET_CODE (static_chain_incoming_rtx) != REG)
6027 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6028
6029 /* The following was moved from init_function_start.
6030 The move is supposed to make sdb output more accurate. */
6031 /* Indicate the beginning of the function body,
6032 as opposed to parm setup. */
6033 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6034
6035 if (GET_CODE (get_last_insn ()) != NOTE)
6036 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6037 parm_birth_insn = get_last_insn ();
6038
6039 context_display = 0;
6040 if (current_function_needs_context)
6041 {
6042 /* Fetch static chain values for containing functions. */
6043 tem = decl_function_context (current_function_decl);
6044 /* Copy the static chain pointer into a pseudo. If we have
6045 small register classes, copy the value from memory if
6046 static_chain_incoming_rtx is a REG. */
6047 if (tem)
6048 {
6049 /* If the static chain originally came in a register, put it back
6050 there, then move it out in the next insn. The reason for
6051 this peculiar code is to satisfy function integration. */
6052 if (SMALL_REGISTER_CLASSES
6053 && GET_CODE (static_chain_incoming_rtx) == REG)
6054 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6055 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6056 }
6057
6058 while (tem)
6059 {
6060 tree rtlexp = make_node (RTL_EXPR);
6061
6062 RTL_EXPR_RTL (rtlexp) = last_ptr;
6063 context_display = tree_cons (tem, rtlexp, context_display);
6064 tem = decl_function_context (tem);
6065 if (tem == 0)
6066 break;
6067 /* Chain through stack frames, assuming pointer to next lexical frame
6068 is found at the place we always store it. */
6069 #ifdef FRAME_GROWS_DOWNWARD
6070 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6071 #endif
6072 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6073 memory_address (Pmode,
6074 last_ptr)));
6075
6076 /* If we are not optimizing, ensure that we know that this
6077 piece of context is live over the entire function. */
6078 if (! optimize)
6079 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6080 save_expr_regs);
6081 }
6082 }
6083
6084 if (current_function_instrument_entry_exit)
6085 {
6086 rtx fun = DECL_RTL (current_function_decl);
6087 if (GET_CODE (fun) == MEM)
6088 fun = XEXP (fun, 0);
6089 else
6090 abort ();
6091 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6092 fun, Pmode,
6093 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6094 0,
6095 hard_frame_pointer_rtx),
6096 Pmode);
6097 }
6098
6099 /* After the display initializations is where the tail-recursion label
6100 should go, if we end up needing one. Ensure we have a NOTE here
6101 since some things (like trampolines) get placed before this. */
6102 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6103
6104 /* Evaluate now the sizes of any types declared among the arguments. */
6105 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6106 {
6107 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6108 EXPAND_MEMORY_USE_BAD);
6109 /* Flush the queue in case this parameter declaration has
6110 side-effects. */
6111 emit_queue ();
6112 }
6113
6114 /* Make sure there is a line number after the function entry setup code. */
6115 force_next_line_note ();
6116 }
6117 \f
6118 /* Undo the effects of init_dummy_function_start. */
6119 void
6120 expand_dummy_function_end ()
6121 {
6122 /* End any sequences that failed to be closed due to syntax errors. */
6123 while (in_sequence_p ())
6124 end_sequence ();
6125
6126 /* Outside function body, can't compute type's actual size
6127 until next function's body starts. */
6128
6129 free_after_parsing (cfun);
6130 free_after_compilation (cfun);
6131 free (cfun);
6132 cfun = 0;
6133 }
6134
6135 /* Call DOIT for each hard register used as a return value from
6136 the current function. */
6137
6138 void
6139 diddle_return_value (doit, arg)
6140 void (*doit) PARAMS ((rtx, void *));
6141 void *arg;
6142 {
6143 rtx outgoing = current_function_return_rtx;
6144
6145 if (! outgoing)
6146 return;
6147
6148 if (GET_CODE (outgoing) == REG
6149 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6150 {
6151 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6152 #ifdef FUNCTION_OUTGOING_VALUE
6153 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6154 #else
6155 outgoing = FUNCTION_VALUE (type, current_function_decl);
6156 #endif
6157 /* If this is a BLKmode structure being returned in registers, then use
6158 the mode computed in expand_return. */
6159 if (GET_MODE (outgoing) == BLKmode)
6160 PUT_MODE (outgoing,
6161 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6162 }
6163
6164 if (GET_CODE (outgoing) == REG)
6165 (*doit) (outgoing, arg);
6166 else if (GET_CODE (outgoing) == PARALLEL)
6167 {
6168 int i;
6169
6170 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6171 {
6172 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6173
6174 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6175 (*doit) (x, arg);
6176 }
6177 }
6178 }
6179
6180 static void
6181 do_clobber_return_reg (reg, arg)
6182 rtx reg;
6183 void *arg ATTRIBUTE_UNUSED;
6184 {
6185 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6186 }
6187
6188 void
6189 clobber_return_register ()
6190 {
6191 diddle_return_value (do_clobber_return_reg, NULL);
6192 }
6193
6194 static void
6195 do_use_return_reg (reg, arg)
6196 rtx reg;
6197 void *arg ATTRIBUTE_UNUSED;
6198 {
6199 emit_insn (gen_rtx_USE (VOIDmode, reg));
6200 }
6201
6202 void
6203 use_return_register ()
6204 {
6205 diddle_return_value (do_use_return_reg, NULL);
6206 }
6207
6208 /* Generate RTL for the end of the current function.
6209 FILENAME and LINE are the current position in the source file.
6210
6211 It is up to language-specific callers to do cleanups for parameters--
6212 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6213
6214 void
6215 expand_function_end (filename, line, end_bindings)
6216 char *filename;
6217 int line;
6218 int end_bindings;
6219 {
6220 tree link;
6221
6222 #ifdef TRAMPOLINE_TEMPLATE
6223 static rtx initial_trampoline;
6224 #endif
6225
6226 finish_expr_for_function ();
6227
6228 #ifdef NON_SAVING_SETJMP
6229 /* Don't put any variables in registers if we call setjmp
6230 on a machine that fails to restore the registers. */
6231 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6232 {
6233 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6234 setjmp_protect (DECL_INITIAL (current_function_decl));
6235
6236 setjmp_protect_args ();
6237 }
6238 #endif
6239
6240 /* Save the argument pointer if a save area was made for it. */
6241 if (arg_pointer_save_area)
6242 {
6243 /* arg_pointer_save_area may not be a valid memory address, so we
6244 have to check it and fix it if necessary. */
6245 rtx seq;
6246 start_sequence ();
6247 emit_move_insn (validize_mem (arg_pointer_save_area),
6248 virtual_incoming_args_rtx);
6249 seq = gen_sequence ();
6250 end_sequence ();
6251 emit_insn_before (seq, tail_recursion_reentry);
6252 }
6253
6254 /* Initialize any trampolines required by this function. */
6255 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6256 {
6257 tree function = TREE_PURPOSE (link);
6258 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6259 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6260 #ifdef TRAMPOLINE_TEMPLATE
6261 rtx blktramp;
6262 #endif
6263 rtx seq;
6264
6265 #ifdef TRAMPOLINE_TEMPLATE
6266 /* First make sure this compilation has a template for
6267 initializing trampolines. */
6268 if (initial_trampoline == 0)
6269 {
6270 end_temporary_allocation ();
6271 initial_trampoline
6272 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6273 resume_temporary_allocation ();
6274
6275 ggc_add_rtx_root (&initial_trampoline, 1);
6276 }
6277 #endif
6278
6279 /* Generate insns to initialize the trampoline. */
6280 start_sequence ();
6281 tramp = round_trampoline_addr (XEXP (tramp, 0));
6282 #ifdef TRAMPOLINE_TEMPLATE
6283 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6284 emit_block_move (blktramp, initial_trampoline,
6285 GEN_INT (TRAMPOLINE_SIZE),
6286 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6287 #endif
6288 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6289 seq = get_insns ();
6290 end_sequence ();
6291
6292 /* Put those insns at entry to the containing function (this one). */
6293 emit_insns_before (seq, tail_recursion_reentry);
6294 }
6295
6296 /* If we are doing stack checking and this function makes calls,
6297 do a stack probe at the start of the function to ensure we have enough
6298 space for another stack frame. */
6299 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6300 {
6301 rtx insn, seq;
6302
6303 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6304 if (GET_CODE (insn) == CALL_INSN)
6305 {
6306 start_sequence ();
6307 probe_stack_range (STACK_CHECK_PROTECT,
6308 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6309 seq = get_insns ();
6310 end_sequence ();
6311 emit_insns_before (seq, tail_recursion_reentry);
6312 break;
6313 }
6314 }
6315
6316 /* Warn about unused parms if extra warnings were specified. */
6317 if (warn_unused && extra_warnings)
6318 {
6319 tree decl;
6320
6321 for (decl = DECL_ARGUMENTS (current_function_decl);
6322 decl; decl = TREE_CHAIN (decl))
6323 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6324 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6325 warning_with_decl (decl, "unused parameter `%s'");
6326 }
6327
6328 /* Delete handlers for nonlocal gotos if nothing uses them. */
6329 if (nonlocal_goto_handler_slots != 0
6330 && ! current_function_has_nonlocal_label)
6331 delete_handlers ();
6332
6333 /* End any sequences that failed to be closed due to syntax errors. */
6334 while (in_sequence_p ())
6335 end_sequence ();
6336
6337 /* Outside function body, can't compute type's actual size
6338 until next function's body starts. */
6339 immediate_size_expand--;
6340
6341 clear_pending_stack_adjust ();
6342 do_pending_stack_adjust ();
6343
6344 /* Mark the end of the function body.
6345 If control reaches this insn, the function can drop through
6346 without returning a value. */
6347 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6348
6349 /* Must mark the last line number note in the function, so that the test
6350 coverage code can avoid counting the last line twice. This just tells
6351 the code to ignore the immediately following line note, since there
6352 already exists a copy of this note somewhere above. This line number
6353 note is still needed for debugging though, so we can't delete it. */
6354 if (flag_test_coverage)
6355 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6356
6357 /* Output a linenumber for the end of the function.
6358 SDB depends on this. */
6359 emit_line_note_force (filename, line);
6360
6361 /* Output the label for the actual return from the function,
6362 if one is expected. This happens either because a function epilogue
6363 is used instead of a return instruction, or because a return was done
6364 with a goto in order to run local cleanups, or because of pcc-style
6365 structure returning. */
6366
6367 if (return_label)
6368 {
6369 /* Before the return label, clobber the return registers so that
6370 they are not propagated live to the rest of the function. This
6371 can only happen with functions that drop through; if there had
6372 been a return statement, there would have either been a return
6373 rtx, or a jump to the return label. */
6374 clobber_return_register ();
6375
6376 emit_label (return_label);
6377 }
6378
6379 /* C++ uses this. */
6380 if (end_bindings)
6381 expand_end_bindings (0, 0, 0);
6382
6383 /* Now handle any leftover exception regions that may have been
6384 created for the parameters. */
6385 {
6386 rtx last = get_last_insn ();
6387 rtx label;
6388
6389 expand_leftover_cleanups ();
6390
6391 /* If there are any catch_clauses remaining, output them now. */
6392 emit_insns (catch_clauses);
6393 catch_clauses = NULL_RTX;
6394 /* If the above emitted any code, make sure we jump around it. */
6395 if (last != get_last_insn ())
6396 {
6397 label = gen_label_rtx ();
6398 last = emit_jump_insn_after (gen_jump (label), last);
6399 last = emit_barrier_after (last);
6400 emit_label (label);
6401 }
6402 }
6403
6404 if (current_function_instrument_entry_exit)
6405 {
6406 rtx fun = DECL_RTL (current_function_decl);
6407 if (GET_CODE (fun) == MEM)
6408 fun = XEXP (fun, 0);
6409 else
6410 abort ();
6411 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6412 fun, Pmode,
6413 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6414 0,
6415 hard_frame_pointer_rtx),
6416 Pmode);
6417 }
6418
6419 /* If we had calls to alloca, and this machine needs
6420 an accurate stack pointer to exit the function,
6421 insert some code to save and restore the stack pointer. */
6422 #ifdef EXIT_IGNORE_STACK
6423 if (! EXIT_IGNORE_STACK)
6424 #endif
6425 if (current_function_calls_alloca)
6426 {
6427 rtx tem = 0;
6428
6429 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6430 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6431 }
6432
6433 /* If scalar return value was computed in a pseudo-reg,
6434 copy that to the hard return register. */
6435 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6436 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6437 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6438 >= FIRST_PSEUDO_REGISTER))
6439 {
6440 rtx real_decl_result;
6441
6442 #ifdef FUNCTION_OUTGOING_VALUE
6443 real_decl_result
6444 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6445 current_function_decl);
6446 #else
6447 real_decl_result
6448 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6449 current_function_decl);
6450 #endif
6451 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6452 /* If this is a BLKmode structure being returned in registers, then use
6453 the mode computed in expand_return. */
6454 if (GET_MODE (real_decl_result) == BLKmode)
6455 PUT_MODE (real_decl_result,
6456 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6457 emit_move_insn (real_decl_result,
6458 DECL_RTL (DECL_RESULT (current_function_decl)));
6459
6460 /* The delay slot scheduler assumes that current_function_return_rtx
6461 holds the hard register containing the return value, not a temporary
6462 pseudo. */
6463 current_function_return_rtx = real_decl_result;
6464 }
6465
6466 /* If returning a structure, arrange to return the address of the value
6467 in a place where debuggers expect to find it.
6468
6469 If returning a structure PCC style,
6470 the caller also depends on this value.
6471 And current_function_returns_pcc_struct is not necessarily set. */
6472 if (current_function_returns_struct
6473 || current_function_returns_pcc_struct)
6474 {
6475 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6476 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6477 #ifdef FUNCTION_OUTGOING_VALUE
6478 rtx outgoing
6479 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6480 current_function_decl);
6481 #else
6482 rtx outgoing
6483 = FUNCTION_VALUE (build_pointer_type (type),
6484 current_function_decl);
6485 #endif
6486
6487 /* Mark this as a function return value so integrate will delete the
6488 assignment and USE below when inlining this function. */
6489 REG_FUNCTION_VALUE_P (outgoing) = 1;
6490
6491 emit_move_insn (outgoing, value_address);
6492 }
6493
6494 /* ??? This should no longer be necessary since the old `stupid' register
6495 allocator is no longer with us, but there are some parts of the compiler
6496 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
6497 their own lifetime info instead of using the general framework. */
6498 use_return_register ();
6499
6500 /* If this is an implementation of __throw, do what's necessary to
6501 communicate between __builtin_eh_return and the epilogue. */
6502 expand_eh_return ();
6503
6504 /* Output a return insn if we are using one.
6505 Otherwise, let the rtl chain end here, to drop through
6506 into the epilogue. */
6507
6508 #ifdef HAVE_return
6509 if (HAVE_return)
6510 {
6511 emit_jump_insn (gen_return ());
6512 emit_barrier ();
6513 }
6514 #endif
6515
6516 /* Fix up any gotos that jumped out to the outermost
6517 binding level of the function.
6518 Must follow emitting RETURN_LABEL. */
6519
6520 /* If you have any cleanups to do at this point,
6521 and they need to create temporary variables,
6522 then you will lose. */
6523 expand_fixups (get_insns ());
6524 }
6525 \f
6526 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6527 or a single insn). */
6528
6529 static int *
6530 record_insns (insns)
6531 rtx insns;
6532 {
6533 int *vec;
6534
6535 if (GET_CODE (insns) == SEQUENCE)
6536 {
6537 int len = XVECLEN (insns, 0);
6538 vec = (int *) oballoc ((len + 1) * sizeof (int));
6539 vec[len] = 0;
6540 while (--len >= 0)
6541 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6542 }
6543 else
6544 {
6545 vec = (int *) oballoc (2 * sizeof (int));
6546 vec[0] = INSN_UID (insns);
6547 vec[1] = 0;
6548 }
6549 return vec;
6550 }
6551
6552 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6553
6554 static int
6555 contains (insn, vec)
6556 rtx insn;
6557 int *vec;
6558 {
6559 register int i, j;
6560
6561 if (GET_CODE (insn) == INSN
6562 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6563 {
6564 int count = 0;
6565 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6566 for (j = 0; vec[j]; j++)
6567 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6568 count++;
6569 return count;
6570 }
6571 else
6572 {
6573 for (j = 0; vec[j]; j++)
6574 if (INSN_UID (insn) == vec[j])
6575 return 1;
6576 }
6577 return 0;
6578 }
6579
6580 int
6581 prologue_epilogue_contains (insn)
6582 rtx insn;
6583 {
6584 if (prologue && contains (insn, prologue))
6585 return 1;
6586 if (epilogue && contains (insn, epilogue))
6587 return 1;
6588 return 0;
6589 }
6590
6591 #ifdef HAVE_return
6592 /* Insert gen_return at the end of block BB. This also means updating
6593 block_for_insn appropriately. */
6594
6595 static void
6596 emit_return_into_block (bb)
6597 basic_block bb;
6598 {
6599 rtx p, end;
6600
6601 end = emit_jump_insn_after (gen_return (), bb->end);
6602 p = NEXT_INSN (bb->end);
6603 while (1)
6604 {
6605 set_block_for_insn (p, bb);
6606 if (p == end)
6607 break;
6608 p = NEXT_INSN (p);
6609 }
6610 bb->end = end;
6611 }
6612 #endif /* HAVE_return */
6613
6614 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6615 this into place with notes indicating where the prologue ends and where
6616 the epilogue begins. Update the basic block information when possible. */
6617
6618 void
6619 thread_prologue_and_epilogue_insns (f)
6620 rtx f ATTRIBUTE_UNUSED;
6621 {
6622 int inserted = 0;
6623 edge e;
6624 rtx seq;
6625
6626 #ifdef HAVE_prologue
6627 if (HAVE_prologue)
6628 {
6629 rtx insn;
6630
6631 start_sequence ();
6632 seq = gen_prologue ();
6633 emit_insn (seq);
6634
6635 /* Retain a map of the prologue insns. */
6636 if (GET_CODE (seq) != SEQUENCE)
6637 seq = get_insns ();
6638 prologue = record_insns (seq);
6639 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6640
6641 /* GDB handles `break f' by setting a breakpoint on the first
6642 line note *after* the prologue. That means that we should
6643 insert a line note here; otherwise, if the next line note
6644 comes part way into the next block, GDB will skip all the way
6645 to that point. */
6646 insn = next_nonnote_insn (f);
6647 while (insn)
6648 {
6649 if (GET_CODE (insn) == NOTE
6650 && NOTE_LINE_NUMBER (insn) >= 0)
6651 {
6652 emit_line_note_force (NOTE_SOURCE_FILE (insn),
6653 NOTE_LINE_NUMBER (insn));
6654 break;
6655 }
6656
6657 insn = PREV_INSN (insn);
6658 }
6659
6660 seq = gen_sequence ();
6661 end_sequence ();
6662
6663 /* If optimization is off, and perhaps in an empty function,
6664 the entry block will have no successors. */
6665 if (ENTRY_BLOCK_PTR->succ)
6666 {
6667 /* Can't deal with multiple successors of the entry block. */
6668 if (ENTRY_BLOCK_PTR->succ->succ_next)
6669 abort ();
6670
6671 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6672 inserted = 1;
6673 }
6674 else
6675 emit_insn_after (seq, f);
6676 }
6677 #endif
6678
6679 /* If the exit block has no non-fake predecessors, we don't need
6680 an epilogue. */
6681 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6682 if ((e->flags & EDGE_FAKE) == 0)
6683 break;
6684 if (e == NULL)
6685 goto epilogue_done;
6686
6687 #ifdef HAVE_return
6688 if (optimize && HAVE_return)
6689 {
6690 /* If we're allowed to generate a simple return instruction,
6691 then by definition we don't need a full epilogue. Examine
6692 the block that falls through to EXIT. If it does not
6693 contain any code, examine its predecessors and try to
6694 emit (conditional) return instructions. */
6695
6696 basic_block last;
6697 edge e_next;
6698 rtx label;
6699
6700 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6701 if (e->flags & EDGE_FALLTHRU)
6702 break;
6703 if (e == NULL)
6704 goto epilogue_done;
6705 last = e->src;
6706
6707 /* Verify that there are no active instructions in the last block. */
6708 label = last->end;
6709 while (label && GET_CODE (label) != CODE_LABEL)
6710 {
6711 if (active_insn_p (label))
6712 break;
6713 label = PREV_INSN (label);
6714 }
6715
6716 if (last->head == label && GET_CODE (label) == CODE_LABEL)
6717 {
6718 for (e = last->pred; e ; e = e_next)
6719 {
6720 basic_block bb = e->src;
6721 rtx jump;
6722
6723 e_next = e->pred_next;
6724 if (bb == ENTRY_BLOCK_PTR)
6725 continue;
6726
6727 jump = bb->end;
6728 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
6729 continue;
6730
6731 /* If we have an unconditional jump, we can replace that
6732 with a simple return instruction. */
6733 if (simplejump_p (jump))
6734 {
6735 emit_return_into_block (bb);
6736 flow_delete_insn (jump);
6737 }
6738
6739 /* If we have a conditional jump, we can try to replace
6740 that with a conditional return instruction. */
6741 else if (condjump_p (jump))
6742 {
6743 rtx ret, *loc;
6744
6745 ret = SET_SRC (PATTERN (jump));
6746 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
6747 loc = &XEXP (ret, 1);
6748 else
6749 loc = &XEXP (ret, 2);
6750 ret = gen_rtx_RETURN (VOIDmode);
6751
6752 if (! validate_change (jump, loc, ret, 0))
6753 continue;
6754 if (JUMP_LABEL (jump))
6755 LABEL_NUSES (JUMP_LABEL (jump))--;
6756
6757 /* If this block has only one successor, it both jumps
6758 and falls through to the fallthru block, so we can't
6759 delete the edge. */
6760 if (bb->succ->succ_next == NULL)
6761 continue;
6762 }
6763 else
6764 continue;
6765
6766 /* Fix up the CFG for the successful change we just made. */
6767 remove_edge (e);
6768 make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
6769 }
6770
6771 /* Emit a return insn for the exit fallthru block. Whether
6772 this is still reachable will be determined later. */
6773
6774 emit_barrier_after (last->end);
6775 emit_return_into_block (last);
6776 }
6777 else
6778 {
6779 /* The exit fallthru block wasn't empty. We have to use
6780 insert_insn_on_edge, since that block may go elsewhere as well
6781 as exiting. */
6782 start_sequence ();
6783 emit_jump_insn (gen_return ());
6784 seq = gen_sequence ();
6785 end_sequence ();
6786 insert_insn_on_edge (seq, e);
6787 inserted = 1;
6788 }
6789 goto epilogue_done;
6790 }
6791 #endif
6792 #ifdef HAVE_epilogue
6793 if (HAVE_epilogue)
6794 {
6795 /* Find the edge that falls through to EXIT. Other edges may exist
6796 due to RETURN instructions, but those don't need epilogues.
6797 There really shouldn't be a mixture -- either all should have
6798 been converted or none, however... */
6799
6800 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6801 if (e->flags & EDGE_FALLTHRU)
6802 break;
6803 if (e == NULL)
6804 goto epilogue_done;
6805
6806 start_sequence ();
6807 emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
6808
6809 seq = gen_epilogue ();
6810 emit_jump_insn (seq);
6811
6812 /* Retain a map of the epilogue insns. */
6813 if (GET_CODE (seq) != SEQUENCE)
6814 seq = get_insns ();
6815 epilogue = record_insns (seq);
6816
6817 seq = gen_sequence ();
6818 end_sequence ();
6819
6820 insert_insn_on_edge (seq, e);
6821 inserted = 1;
6822 }
6823 #endif
6824 epilogue_done:
6825
6826 if (inserted)
6827 commit_edge_insertions ();
6828 }
6829
6830 /* Reposition the prologue-end and epilogue-begin notes after instruction
6831 scheduling and delayed branch scheduling. */
6832
6833 void
6834 reposition_prologue_and_epilogue_notes (f)
6835 rtx f ATTRIBUTE_UNUSED;
6836 {
6837 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6838 /* Reposition the prologue and epilogue notes. */
6839 if (n_basic_blocks)
6840 {
6841 int len;
6842
6843 if (prologue)
6844 {
6845 register rtx insn, note = 0;
6846
6847 /* Scan from the beginning until we reach the last prologue insn.
6848 We apparently can't depend on basic_block_{head,end} after
6849 reorg has run. */
6850 for (len = 0; prologue[len]; len++)
6851 ;
6852 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6853 {
6854 if (GET_CODE (insn) == NOTE)
6855 {
6856 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6857 note = insn;
6858 }
6859 else if ((len -= contains (insn, prologue)) == 0)
6860 {
6861 rtx next;
6862 /* Find the prologue-end note if we haven't already, and
6863 move it to just after the last prologue insn. */
6864 if (note == 0)
6865 {
6866 for (note = insn; (note = NEXT_INSN (note));)
6867 if (GET_CODE (note) == NOTE
6868 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6869 break;
6870 }
6871
6872 next = NEXT_INSN (note);
6873
6874 /* Whether or not we can depend on BLOCK_HEAD,
6875 attempt to keep it up-to-date. */
6876 if (BLOCK_HEAD (0) == note)
6877 BLOCK_HEAD (0) = next;
6878
6879 remove_insn (note);
6880 add_insn_after (note, insn);
6881 }
6882 }
6883 }
6884
6885 if (epilogue)
6886 {
6887 register rtx insn, note = 0;
6888
6889 /* Scan from the end until we reach the first epilogue insn.
6890 We apparently can't depend on basic_block_{head,end} after
6891 reorg has run. */
6892 for (len = 0; epilogue[len]; len++)
6893 ;
6894 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6895 {
6896 if (GET_CODE (insn) == NOTE)
6897 {
6898 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6899 note = insn;
6900 }
6901 else if ((len -= contains (insn, epilogue)) == 0)
6902 {
6903 /* Find the epilogue-begin note if we haven't already, and
6904 move it to just before the first epilogue insn. */
6905 if (note == 0)
6906 {
6907 for (note = insn; (note = PREV_INSN (note));)
6908 if (GET_CODE (note) == NOTE
6909 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6910 break;
6911 }
6912
6913 /* Whether or not we can depend on BLOCK_HEAD,
6914 attempt to keep it up-to-date. */
6915 if (n_basic_blocks
6916 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6917 BLOCK_HEAD (n_basic_blocks-1) = note;
6918
6919 remove_insn (note);
6920 add_insn_before (note, insn);
6921 }
6922 }
6923 }
6924 }
6925 #endif /* HAVE_prologue or HAVE_epilogue */
6926 }
6927
6928 /* Mark T for GC. */
6929
6930 static void
6931 mark_temp_slot (t)
6932 struct temp_slot *t;
6933 {
6934 while (t)
6935 {
6936 ggc_mark_rtx (t->slot);
6937 ggc_mark_rtx (t->address);
6938 ggc_mark_tree (t->rtl_expr);
6939
6940 t = t->next;
6941 }
6942 }
6943
6944 /* Mark P for GC. */
6945
6946 static void
6947 mark_function_status (p)
6948 struct function *p;
6949 {
6950 int i;
6951 rtx *r;
6952
6953 if (p == 0)
6954 return;
6955
6956 ggc_mark_rtx (p->arg_offset_rtx);
6957
6958 if (p->x_parm_reg_stack_loc)
6959 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6960 i > 0; --i, ++r)
6961 ggc_mark_rtx (*r);
6962
6963 ggc_mark_rtx (p->return_rtx);
6964 ggc_mark_rtx (p->x_cleanup_label);
6965 ggc_mark_rtx (p->x_return_label);
6966 ggc_mark_rtx (p->x_save_expr_regs);
6967 ggc_mark_rtx (p->x_stack_slot_list);
6968 ggc_mark_rtx (p->x_parm_birth_insn);
6969 ggc_mark_rtx (p->x_tail_recursion_label);
6970 ggc_mark_rtx (p->x_tail_recursion_reentry);
6971 ggc_mark_rtx (p->internal_arg_pointer);
6972 ggc_mark_rtx (p->x_arg_pointer_save_area);
6973 ggc_mark_tree (p->x_rtl_expr_chain);
6974 ggc_mark_rtx (p->x_last_parm_insn);
6975 ggc_mark_tree (p->x_context_display);
6976 ggc_mark_tree (p->x_trampoline_list);
6977 ggc_mark_rtx (p->epilogue_delay_list);
6978
6979 mark_temp_slot (p->x_temp_slots);
6980
6981 {
6982 struct var_refs_queue *q = p->fixup_var_refs_queue;
6983 while (q)
6984 {
6985 ggc_mark_rtx (q->modified);
6986 q = q->next;
6987 }
6988 }
6989
6990 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
6991 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
6992 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6993 ggc_mark_tree (p->x_nonlocal_labels);
6994 }
6995
6996 /* Mark the function chain ARG (which is really a struct function **)
6997 for GC. */
6998
6999 static void
7000 mark_function_chain (arg)
7001 void *arg;
7002 {
7003 struct function *f = *(struct function **) arg;
7004
7005 for (; f; f = f->next_global)
7006 {
7007 ggc_mark_tree (f->decl);
7008
7009 mark_function_status (f);
7010 mark_eh_status (f->eh);
7011 mark_stmt_status (f->stmt);
7012 mark_expr_status (f->expr);
7013 mark_emit_status (f->emit);
7014 mark_varasm_status (f->varasm);
7015
7016 if (mark_machine_status)
7017 (*mark_machine_status) (f);
7018 if (mark_lang_status)
7019 (*mark_lang_status) (f);
7020
7021 if (f->original_arg_vector)
7022 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7023 if (f->original_decl_initial)
7024 ggc_mark_tree (f->original_decl_initial);
7025 }
7026 }
7027
7028 /* Called once, at initialization, to initialize function.c. */
7029
7030 void
7031 init_function_once ()
7032 {
7033 ggc_add_root (&all_functions, 1, sizeof all_functions,
7034 mark_function_chain);
7035 }