1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
27
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
31
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register.
36
37 Call `put_var_into_stack' when you learn, belatedly, that a variable
38 previously given a pseudo-register must in fact go in the stack.
39 This function changes the DECL_RTL to be a stack slot instead of a reg
40 then scans all the RTL instructions so far generated to correct them. */
41
42 #include "config.h"
43 #include "system.h"
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "except.h"
48 #include "function.h"
49 #include "insn-flags.h"
50 #include "expr.h"
51 #include "insn-codes.h"
52 #include "regs.h"
53 #include "hard-reg-set.h"
54 #include "insn-config.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "basic-block.h"
58 #include "obstack.h"
59 #include "toplev.h"
60 #include "hash.h"
61 #include "ggc.h"
62 #include "tm_p.h"
63
64 #ifndef ACCUMULATE_OUTGOING_ARGS
65 #define ACCUMULATE_OUTGOING_ARGS 0
66 #endif
67
68 #ifndef TRAMPOLINE_ALIGNMENT
69 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
70 #endif
71
72 #ifndef LOCAL_ALIGNMENT
73 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
74 #endif
75
76 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
77 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
78 #endif
79
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82 give the same symbol without quotes for an alternative entry point. You
83 must define both, or neither. */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #define SYMBOL__MAIN __main
87 #endif
88
89 /* Round a value down to the largest multiple of the required alignment
90 that does not exceed it. Avoid using division in case the value is
91 negative. Assume the alignment is a power of two. */
92 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
93
94 /* Similar, but round up to the next multiple of the required
95 alignment. */
96 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
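/* Editor's sketch, not part of the original file: a standalone illustration
   of FLOOR_ROUND and CEIL_ROUND for a power-of-two alignment.  The masking
   form keeps the result well defined for negative values such as the
   downward-growing frame offsets used below.  Guarded out of the build,
   like other disabled code in this file.  */
#if 0
static void
round_macro_examples ()
{
  /* With an alignment of 8:
       FLOOR_ROUND (13, 8)  == 8     CEIL_ROUND (13, 8)  == 16
       FLOOR_ROUND (-13, 8) == -16   CEIL_ROUND (-13, 8) == -8
     and values that are already multiples of 8 are left unchanged.  */
  if (FLOOR_ROUND (13, 8) != 8 || CEIL_ROUND (13, 8) != 16)
    abort ();
  if (FLOOR_ROUND (-13, 8) != -16 || CEIL_ROUND (-13, 8) != -8)
    abort ();
  if (FLOOR_ROUND (16, 8) != 16 || CEIL_ROUND (16, 8) != 16)
    abort ();
}
#endif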
97
98 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
99 during rtl generation. If they are different register numbers, this is
100 always true. It may also be true if
101 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
102 generation. See fix_lexical_addr for details. */
103
104 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
105 #define NEED_SEPARATE_AP
106 #endif
107
108 /* Nonzero if function being compiled doesn't contain any calls
109 (ignoring the prologue and epilogue). This is set prior to
110 local register allocation and is valid for the remaining
111 compiler passes. */
112 int current_function_is_leaf;
113
114 /* Nonzero if function being compiled doesn't contain any instructions
115 that can throw an exception. This is set prior to final. */
116
117 int current_function_nothrow;
118
119 /* Nonzero if function being compiled doesn't modify the stack pointer
120 (ignoring the prologue and epilogue). This is only valid after
121 life_analysis has run. */
122 int current_function_sp_is_unchanging;
123
124 /* Nonzero if the function being compiled is a leaf function which only
125 uses leaf registers. This is valid after reload (specifically after
126 sched2) and is useful only if the port defines LEAF_REGISTERS. */
127 int current_function_uses_only_leaf_regs;
128
129 /* Nonzero once virtual register instantiation has been done.
130 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
131 static int virtuals_instantiated;
132
133 /* These variables hold pointers to functions to
134 save and restore machine-specific data,
135 in push_function_context and pop_function_context. */
136 void (*init_machine_status) PARAMS ((struct function *));
137 void (*save_machine_status) PARAMS ((struct function *));
138 void (*restore_machine_status) PARAMS ((struct function *));
139 void (*mark_machine_status) PARAMS ((struct function *));
140 void (*free_machine_status) PARAMS ((struct function *));
141
142 /* Likewise, but for language-specific data. */
143 void (*init_lang_status) PARAMS ((struct function *));
144 void (*save_lang_status) PARAMS ((struct function *));
145 void (*restore_lang_status) PARAMS ((struct function *));
146 void (*mark_lang_status) PARAMS ((struct function *));
147 void (*free_lang_status) PARAMS ((struct function *));
148
149 /* The FUNCTION_DECL for an inline function currently being expanded. */
150 tree inline_function_decl;
151
152 /* The currently compiled function. */
153 struct function *cfun = 0;
154
155 /* Global list of all compiled functions. */
156 struct function *all_functions = 0;
157
158 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
159 static varray_type prologue;
160 static varray_type epilogue;
161
162 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
163 in this function. */
164 static varray_type sibcall_epilogue;
165 \f
166 /* In order to evaluate some expressions, such as function calls returning
167 structures in memory, we need to temporarily allocate stack locations.
168 We record each allocated temporary in the following structure.
169
170 Associated with each temporary slot is a nesting level. When we pop up
171 one level, all temporaries associated with the previous level are freed.
172 Normally, all temporaries are freed after the execution of the statement
173 in which they were created. However, if we are inside a ({...}) grouping,
174 the result may be in a temporary and hence must be preserved. If the
175 result could be in a temporary, we preserve it if we can determine which
176 one it is in. If we cannot determine which temporary may contain the
177 result, all temporaries are preserved. A temporary is preserved by
178 pretending it was allocated at the previous nesting level.
179
180 Automatic variables are also assigned temporary slots, at the nesting
181 level where they are defined. They are marked as "kept" so that
182 free_temp_slots will not free them. */
183
184 struct temp_slot
185 {
186 /* Points to next temporary slot. */
187 struct temp_slot *next;
188 /* The rtx used to reference the slot. */
189 rtx slot;
190 /* The rtx used to represent the address if not the address of the
191 slot above. May be an EXPR_LIST if multiple addresses exist. */
192 rtx address;
193 /* The alignment (in bits) of the slot. */
194 int align;
195 /* The size, in units, of the slot. */
196 HOST_WIDE_INT size;
197 /* The alias set for the slot. If the alias set is zero, we don't
198 know anything about the alias set of the slot. We must only
199 reuse a slot if it is assigned an object of the same alias set.
200 Otherwise, the rest of the compiler may assume that the new use
201 of the slot cannot alias the old use of the slot, which is
202 false. If the slot has alias set zero, then we can't reuse the
203 slot at all, since we have no idea what alias set may have been
204 imposed on the memory. For example, if the stack slot is the
205 call frame for an inlined function, we have no idea what alias
206 sets will be assigned to various pieces of the call frame. */
207 HOST_WIDE_INT alias_set;
208 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
209 tree rtl_expr;
210 /* Non-zero if this temporary is currently in use. */
211 char in_use;
212 /* Non-zero if this temporary has its address taken. */
213 char addr_taken;
214 /* Nesting level at which this slot is being used. */
215 int level;
216 /* Non-zero if this should survive a call to free_temp_slots. */
217 int keep;
218 /* The offset of the slot from the frame_pointer, including extra space
219 for alignment. This info is for combine_temp_slots. */
220 HOST_WIDE_INT base_offset;
221 /* The size of the slot, including extra space for alignment. This
222 info is for combine_temp_slots. */
223 HOST_WIDE_INT full_size;
224 };
225 \f
226 /* This structure is used to record MEMs or pseudos used to replace VAR, any
227 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
228 maintain this list in case two operands of an insn were required to match;
229 in that case we must ensure we use the same replacement. */
230
231 struct fixup_replacement
232 {
233 rtx old;
234 rtx new;
235 struct fixup_replacement *next;
236 };
237
238 struct insns_for_mem_entry {
239 /* The KEY in HE will be a MEM. */
240 struct hash_entry he;
241 /* These are the INSNS which reference the MEM. */
242 rtx insns;
243 };
244
245 /* Forward declarations. */
246
247 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
248 int, struct function *));
249 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
250 HOST_WIDE_INT, int, tree));
251 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
252 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
253 enum machine_mode, enum machine_mode,
254 int, unsigned int, int,
255 struct hash_table *));
256 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
257 enum machine_mode,
258 struct hash_table *));
259 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
260 struct hash_table *));
261 static struct fixup_replacement
262 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
263 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
264 rtx, int, struct hash_table *));
265 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
266 struct fixup_replacement **));
267 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
268 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
269 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
270 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
271 static void instantiate_decls PARAMS ((tree, int));
272 static void instantiate_decls_1 PARAMS ((tree, int));
273 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
274 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
275 static void delete_handlers PARAMS ((void));
276 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
277 struct args_size *));
278 #ifndef ARGS_GROW_DOWNWARD
279 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
280 tree));
281 #endif
282 static rtx round_trampoline_addr PARAMS ((rtx));
283 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
284 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
285 static tree blocks_nreverse PARAMS ((tree));
286 static int all_blocks PARAMS ((tree, tree *));
287 static tree *get_block_vector PARAMS ((tree, int *));
288 /* We always define `record_insns' even if it's not used so that we
289 can always export `prologue_epilogue_contains'. */
290 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
291 static int contains PARAMS ((rtx, varray_type));
292 #ifdef HAVE_return
293 static void emit_return_into_block PARAMS ((basic_block, rtx));
294 #endif
295 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
296 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
297 struct hash_table *));
298 static int is_addressof PARAMS ((rtx *, void *));
299 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
300 struct hash_table *,
301 hash_table_key));
302 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
303 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
304 static int insns_for_mem_walk PARAMS ((rtx *, void *));
305 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
306 static void mark_temp_slot PARAMS ((struct temp_slot *));
307 static void mark_function_status PARAMS ((struct function *));
308 static void mark_function_chain PARAMS ((void *));
309 static void prepare_function_start PARAMS ((void));
310 static void do_clobber_return_reg PARAMS ((rtx, void *));
311 static void do_use_return_reg PARAMS ((rtx, void *));
312 \f
313 /* Pointer to chain of `struct function' for containing functions. */
314 struct function *outer_function_chain;
315
316 /* Given a function decl for a containing function,
317 return the `struct function' for it. */
318
319 struct function *
320 find_function_data (decl)
321 tree decl;
322 {
323 struct function *p;
324
325 for (p = outer_function_chain; p; p = p->next)
326 if (p->decl == decl)
327 return p;
328
329 abort ();
330 }
331
332 /* Save the current context for compilation of a nested function.
333 This is called from language-specific code. The caller should use
334 the save_lang_status callback to save any language-specific state,
335 since this function knows only about language-independent
336 variables. */
337
338 void
339 push_function_context_to (context)
340 tree context;
341 {
342 struct function *p, *context_data;
343
344 if (context)
345 {
346 context_data = (context == current_function_decl
347 ? cfun
348 : find_function_data (context));
349 context_data->contains_functions = 1;
350 }
351
352 if (cfun == 0)
353 init_dummy_function_start ();
354 p = cfun;
355
356 p->next = outer_function_chain;
357 outer_function_chain = p;
358 p->fixup_var_refs_queue = 0;
359
360 save_tree_status (p);
361 if (save_lang_status)
362 (*save_lang_status) (p);
363 if (save_machine_status)
364 (*save_machine_status) (p);
365
366 cfun = 0;
367 }
368
369 void
370 push_function_context ()
371 {
372 push_function_context_to (current_function_decl);
373 }
374
375 /* Restore the last saved context, at the end of a nested function.
376 This function is called from language-specific code. */
377
378 void
379 pop_function_context_from (context)
380 tree context ATTRIBUTE_UNUSED;
381 {
382 struct function *p = outer_function_chain;
383 struct var_refs_queue *queue;
384 struct var_refs_queue *next;
385
386 cfun = p;
387 outer_function_chain = p->next;
388
389 current_function_decl = p->decl;
390 reg_renumber = 0;
391
392 restore_tree_status (p);
393 restore_emit_status (p);
394
395 if (restore_machine_status)
396 (*restore_machine_status) (p);
397 if (restore_lang_status)
398 (*restore_lang_status) (p);
399
400 /* Finish doing put_var_into_stack for any of our variables
401 which became addressable during the nested function. */
402 for (queue = p->fixup_var_refs_queue; queue; queue = next)
403 {
404 next = queue->next;
405 fixup_var_refs (queue->modified, queue->promoted_mode,
406 queue->unsignedp, 0);
407 free (queue);
408 }
409 p->fixup_var_refs_queue = 0;
410
411 /* Reset variables that have known state during rtx generation. */
412 rtx_equal_function_value_matters = 1;
413 virtuals_instantiated = 0;
414 }
415
416 void
417 pop_function_context ()
418 {
419 pop_function_context_from (current_function_decl);
420 }
421
422 /* Clear out all parts of the state in F that can safely be discarded
423 after the function has been parsed, but not compiled, to let
424 garbage collection reclaim the memory. */
425
426 void
427 free_after_parsing (f)
428 struct function *f;
429 {
430 /* f->expr->forced_labels is used by code generation. */
431 /* f->emit->regno_reg_rtx is used by code generation. */
432 /* f->varasm is used by code generation. */
433 /* f->eh->eh_return_stub_label is used by code generation. */
434
435 if (free_lang_status)
436 (*free_lang_status) (f);
437 free_stmt_status (f);
438 }
439
440 /* Clear out all parts of the state in F that can safely be discarded
441 after the function has been compiled, to let garbage collection
442 reclaim the memory. */
443
444 void
445 free_after_compilation (f)
446 struct function *f;
447 {
448 struct temp_slot *ts;
449 struct temp_slot *next;
450
451 free_eh_status (f);
452 free_expr_status (f);
453 free_emit_status (f);
454 free_varasm_status (f);
455
456 if (free_machine_status)
457 (*free_machine_status) (f);
458
459 if (f->x_parm_reg_stack_loc)
460 free (f->x_parm_reg_stack_loc);
461
462 for (ts = f->x_temp_slots; ts; ts = next)
463 {
464 next = ts->next;
465 free (ts);
466 }
467 f->x_temp_slots = NULL;
468
469 f->arg_offset_rtx = NULL;
470 f->return_rtx = NULL;
471 f->internal_arg_pointer = NULL;
472 f->x_nonlocal_labels = NULL;
473 f->x_nonlocal_goto_handler_slots = NULL;
474 f->x_nonlocal_goto_handler_labels = NULL;
475 f->x_nonlocal_goto_stack_level = NULL;
476 f->x_cleanup_label = NULL;
477 f->x_return_label = NULL;
478 f->x_save_expr_regs = NULL;
479 f->x_stack_slot_list = NULL;
480 f->x_rtl_expr_chain = NULL;
481 f->x_tail_recursion_label = NULL;
482 f->x_tail_recursion_reentry = NULL;
483 f->x_arg_pointer_save_area = NULL;
484 f->x_context_display = NULL;
485 f->x_trampoline_list = NULL;
486 f->x_parm_birth_insn = NULL;
487 f->x_last_parm_insn = NULL;
488 f->x_parm_reg_stack_loc = NULL;
489 f->fixup_var_refs_queue = NULL;
490 f->original_arg_vector = NULL;
491 f->original_decl_initial = NULL;
492 f->inl_last_parm_insn = NULL;
493 f->epilogue_delay_list = NULL;
494 }
495
496 \f
497 /* Allocate fixed slots in the stack frame of the current function. */
498
499 /* Return size needed for stack frame based on slots so far allocated in
500 function F.
501 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
502 the caller may have to do that. */
503
504 HOST_WIDE_INT
505 get_func_frame_size (f)
506 struct function *f;
507 {
508 #ifdef FRAME_GROWS_DOWNWARD
509 return -f->x_frame_offset;
510 #else
511 return f->x_frame_offset;
512 #endif
513 }
514
515 /* Return size needed for stack frame based on slots so far allocated.
516 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
517 the caller may have to do that. */
518 HOST_WIDE_INT
519 get_frame_size ()
520 {
521 return get_func_frame_size (cfun);
522 }
523
524 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
525 with machine mode MODE.
526
527 ALIGN controls the amount of alignment for the address of the slot:
528 0 means according to MODE,
529 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
530 positive specifies alignment boundary in bits.
531
532 We do not round to stack_boundary here.
533
534 FUNCTION specifies the function to allocate in. */
535
536 static rtx
537 assign_stack_local_1 (mode, size, align, function)
538 enum machine_mode mode;
539 HOST_WIDE_INT size;
540 int align;
541 struct function *function;
542 {
543 register rtx x, addr;
544 int bigend_correction = 0;
545 int alignment;
546
547 /* Allocate in the memory associated with the function in whose frame
548 we are assigning. */
549 if (function != cfun)
550 push_obstacks (function->function_obstack,
551 function->function_maybepermanent_obstack);
552
553 if (align == 0)
554 {
555 tree type;
556
557 if (mode == BLKmode)
558 alignment = BIGGEST_ALIGNMENT;
559 else
560 alignment = GET_MODE_ALIGNMENT (mode);
561
562 /* Allow the target to (possibly) increase the alignment of this
563 stack slot. */
564 type = type_for_mode (mode, 0);
565 if (type)
566 alignment = LOCAL_ALIGNMENT (type, alignment);
567
568 alignment /= BITS_PER_UNIT;
569 }
570 else if (align == -1)
571 {
572 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
573 size = CEIL_ROUND (size, alignment);
574 }
575 else
576 alignment = align / BITS_PER_UNIT;
577
578 #ifdef FRAME_GROWS_DOWNWARD
579 function->x_frame_offset -= size;
580 #endif
581
582 /* Ignore any alignment we cannot honor given the preferred stack boundary. */
583 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
584 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
585
586 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
587 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
588
589 /* Round frame offset to that alignment.
590 We must be careful here, since FRAME_OFFSET might be negative and
591 division with a negative dividend isn't as well defined as we might
592 like. So we instead assume that ALIGNMENT is a power of two and
593 use logical operations which are unambiguous. */
594 #ifdef FRAME_GROWS_DOWNWARD
595 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
596 #else
597 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
598 #endif
599
600 /* On a big-endian machine, if we are allocating more space than we will use,
601 use the least significant bytes of those that are allocated. */
602 if (BYTES_BIG_ENDIAN && mode != BLKmode)
603 bigend_correction = size - GET_MODE_SIZE (mode);
604
605 /* If we have already instantiated virtual registers, return the actual
606 address relative to the frame pointer. */
607 if (function == cfun && virtuals_instantiated)
608 addr = plus_constant (frame_pointer_rtx,
609 (frame_offset + bigend_correction
610 + STARTING_FRAME_OFFSET));
611 else
612 addr = plus_constant (virtual_stack_vars_rtx,
613 function->x_frame_offset + bigend_correction);
614
615 #ifndef FRAME_GROWS_DOWNWARD
616 function->x_frame_offset += size;
617 #endif
618
619 x = gen_rtx_MEM (mode, addr);
620
621 function->x_stack_slot_list
622 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
623
624 if (function != cfun)
625 pop_obstacks ();
626
627 return x;
628 }
629
630 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
631 current function. */
632
633 rtx
634 assign_stack_local (mode, size, align)
635 enum machine_mode mode;
636 HOST_WIDE_INT size;
637 int align;
638 {
639 return assign_stack_local_1 (mode, size, align, cfun);
640 }
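/* Editor's note, a hedged usage sketch (not from the original source): a
   typical caller, e.g. a back end or reload needing a scratch word on the
   frame, would do something like the following.  An ALIGN of 0 means
   "align as MODE requires"; the function name here is illustrative only.  */
#if 0
static rtx
example_scratch_slot ()
{
  /* One word of stack, aligned as SImode requires; the returned MEM is
     addressed off the virtual frame pointer until virtual registers are
     instantiated.  */
  return assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
}
#endif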
641 \f
642 /* Allocate a temporary stack slot and record it for possible later
643 reuse.
644
645 MODE is the machine mode to be given to the returned rtx.
646
647 SIZE is the size in units of the space required. We do no rounding here
648 since assign_stack_local will do any required rounding.
649
650 KEEP is 1 if this slot is to be retained after a call to
651 free_temp_slots. Automatic variables for a block are allocated
652 with this flag. KEEP is 2 if we allocate a longer term temporary,
653 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
654 if we are to allocate something at an inner level to be treated as
655 a variable in the block (e.g., a SAVE_EXPR).
656
657 TYPE is the type that will be used for the stack slot. */
658
659 static rtx
660 assign_stack_temp_for_type (mode, size, keep, type)
661 enum machine_mode mode;
662 HOST_WIDE_INT size;
663 int keep;
664 tree type;
665 {
666 int align;
667 HOST_WIDE_INT alias_set;
668 struct temp_slot *p, *best_p = 0;
669
670 /* If SIZE is -1 it means that somebody tried to allocate a temporary
671 of a variable size. */
672 if (size == -1)
673 abort ();
674
675 /* If we know the alias set for the memory that will be used, use
676 it. If there's no TYPE, then we don't know anything about the
677 alias set for the memory. */
678 if (type)
679 alias_set = get_alias_set (type);
680 else
681 alias_set = 0;
682
683 if (mode == BLKmode)
684 align = BIGGEST_ALIGNMENT;
685 else
686 align = GET_MODE_ALIGNMENT (mode);
687
688 if (! type)
689 type = type_for_mode (mode, 0);
690
691 if (type)
692 align = LOCAL_ALIGNMENT (type, align);
693
694 /* Try to find an available, already-allocated temporary of the proper
695 mode which meets the size and alignment requirements. Choose the
696 smallest one with the closest alignment. */
697 for (p = temp_slots; p; p = p->next)
698 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
699 && ! p->in_use
700 && (! flag_strict_aliasing
701 || (alias_set && p->alias_set == alias_set))
702 && (best_p == 0 || best_p->size > p->size
703 || (best_p->size == p->size && best_p->align > p->align)))
704 {
705 if (p->align == align && p->size == size)
706 {
707 best_p = 0;
708 break;
709 }
710 best_p = p;
711 }
712
713 /* Make our best, if any, the one to use. */
714 if (best_p)
715 {
716 /* If there are enough aligned bytes left over, make them into a new
717 temp_slot so that the extra bytes don't get wasted. Do this only
718 for BLKmode slots, so that we can be sure of the alignment. */
719 if (GET_MODE (best_p->slot) == BLKmode)
720 {
721 int alignment = best_p->align / BITS_PER_UNIT;
722 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
723
724 if (best_p->size - rounded_size >= alignment)
725 {
726 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
727 p->in_use = p->addr_taken = 0;
728 p->size = best_p->size - rounded_size;
729 p->base_offset = best_p->base_offset + rounded_size;
730 p->full_size = best_p->full_size - rounded_size;
731 p->slot = gen_rtx_MEM (BLKmode,
732 plus_constant (XEXP (best_p->slot, 0),
733 rounded_size));
734 p->align = best_p->align;
735 p->address = 0;
736 p->rtl_expr = 0;
737 p->alias_set = best_p->alias_set;
738 p->next = temp_slots;
739 temp_slots = p;
740
741 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
742 stack_slot_list);
743
744 best_p->size = rounded_size;
745 best_p->full_size = rounded_size;
746 }
747 }
748
749 p = best_p;
750 }
751
752 /* If we still didn't find one, make a new temporary. */
753 if (p == 0)
754 {
755 HOST_WIDE_INT frame_offset_old = frame_offset;
756
757 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
758
759 /* We are passing an explicit alignment request to assign_stack_local.
760 One side effect of that is assign_stack_local will not round SIZE
761 to ensure the frame offset remains suitably aligned.
762
763 So for requests which depended on the rounding of SIZE, we go ahead
764 and round it now. We also make sure ALIGNMENT is at least
765 BIGGEST_ALIGNMENT. */
766 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
767 abort ();
768 p->slot = assign_stack_local (mode,
769 (mode == BLKmode
770 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
771 : size),
772 align);
773
774 p->align = align;
775 p->alias_set = alias_set;
776
777 /* The following slot size computation is necessary because we don't
778 know the actual size of the temporary slot until assign_stack_local
779 has performed all the frame alignment and size rounding for the
780 requested temporary. Note that extra space added for alignment
781 can be either above or below this stack slot depending on which
782 way the frame grows. We include the extra space if and only if it
783 is above this slot. */
784 #ifdef FRAME_GROWS_DOWNWARD
785 p->size = frame_offset_old - frame_offset;
786 #else
787 p->size = size;
788 #endif
789
790 /* Now define the fields used by combine_temp_slots. */
791 #ifdef FRAME_GROWS_DOWNWARD
792 p->base_offset = frame_offset;
793 p->full_size = frame_offset_old - frame_offset;
794 #else
795 p->base_offset = frame_offset_old;
796 p->full_size = frame_offset - frame_offset_old;
797 #endif
798 p->address = 0;
799 p->next = temp_slots;
800 temp_slots = p;
801 }
802
803 p->in_use = 1;
804 p->addr_taken = 0;
805 p->rtl_expr = seq_rtl_expr;
806
807 if (keep == 2)
808 {
809 p->level = target_temp_slot_level;
810 p->keep = 0;
811 }
812 else if (keep == 3)
813 {
814 p->level = var_temp_slot_level;
815 p->keep = 0;
816 }
817 else
818 {
819 p->level = temp_slot_level;
820 p->keep = keep;
821 }
822
823 /* We may be reusing an old slot, so clear any MEM flags that may have been
824 set from before. */
825 RTX_UNCHANGING_P (p->slot) = 0;
826 MEM_IN_STRUCT_P (p->slot) = 0;
827 MEM_SCALAR_P (p->slot) = 0;
828 MEM_ALIAS_SET (p->slot) = alias_set;
829
830 if (type != 0)
831 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
832
833 return p->slot;
834 }
835
836 /* Allocate a temporary stack slot and record it for possible later
837 reuse. First three arguments are same as in preceding function. */
838
839 rtx
840 assign_stack_temp (mode, size, keep)
841 enum machine_mode mode;
842 HOST_WIDE_INT size;
843 int keep;
844 {
845 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
846 }
847 \f
848 /* Assign a temporary of given TYPE.
849 KEEP is as for assign_stack_temp.
850 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
851 it is 0 if a register is OK.
852 DONT_PROMOTE is 1 if we should not promote values in register
853 to wider modes. */
854
855 rtx
856 assign_temp (type, keep, memory_required, dont_promote)
857 tree type;
858 int keep;
859 int memory_required;
860 int dont_promote ATTRIBUTE_UNUSED;
861 {
862 enum machine_mode mode = TYPE_MODE (type);
863 #ifndef PROMOTE_FOR_CALL_ONLY
864 int unsignedp = TREE_UNSIGNED (type);
865 #endif
866
867 if (mode == BLKmode || memory_required)
868 {
869 HOST_WIDE_INT size = int_size_in_bytes (type);
870 rtx tmp;
871
872 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
873 problems with allocating the stack space. */
874 if (size == 0)
875 size = 1;
876
877 /* Unfortunately, we don't yet know how to allocate variable-sized
878 temporaries. However, sometimes we have a fixed upper limit on
879 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
880 instead. This is the case for Chill variable-sized strings. */
881 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
882 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
883 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
884 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
885
886 tmp = assign_stack_temp_for_type (mode, size, keep, type);
887 return tmp;
888 }
889
890 #ifndef PROMOTE_FOR_CALL_ONLY
891 if (! dont_promote)
892 mode = promote_mode (type, mode, &unsignedp, 0);
893 #endif
894
895 return gen_reg_rtx (mode);
896 }
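/* Editor's note, an illustrative sketch rather than code from this file:
   a front end that needs writable storage for an aggregate value (say, a
   structure returned in memory) would typically request it as below; the
   helper name and its TYPE argument are hypothetical.  */
#if 0
static rtx
example_aggregate_temp (type)
     tree type;
{
  /* keep == 0: freed by the next free_temp_slots; memory_required == 1
     because an aggregate cannot live in a register here; dont_promote == 1
     since no mode promotion is wanted for a MEM.  */
  return assign_temp (type, 0, 1, 1);
}
#endif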
897 \f
898 /* Combine temporary stack slots which are adjacent on the stack.
899
900 This allows for better use of already allocated stack space. This is only
901 done for BLKmode slots because we can be sure that we won't have alignment
902 problems in this case. */
903
904 void
905 combine_temp_slots ()
906 {
907 struct temp_slot *p, *q;
908 struct temp_slot *prev_p, *prev_q;
909 int num_slots;
910
911 /* We can't combine slots, because the information about which slot
912 is in which alias set will be lost. */
913 if (flag_strict_aliasing)
914 return;
915
916 /* If there are a lot of temp slots, don't do anything unless
917 high levels of optimization are enabled. */
918 if (! flag_expensive_optimizations)
919 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
920 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
921 return;
922
923 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
924 {
925 int delete_p = 0;
926
927 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
928 for (q = p->next, prev_q = p; q; q = prev_q->next)
929 {
930 int delete_q = 0;
931 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
932 {
933 if (p->base_offset + p->full_size == q->base_offset)
934 {
935 /* Q comes after P; combine Q into P. */
936 p->size += q->size;
937 p->full_size += q->full_size;
938 delete_q = 1;
939 }
940 else if (q->base_offset + q->full_size == p->base_offset)
941 {
942 /* P comes after Q; combine P into Q. */
943 q->size += p->size;
944 q->full_size += p->full_size;
945 delete_p = 1;
946 break;
947 }
948 }
949 /* Either delete Q or advance past it. */
950 if (delete_q)
951 {
952 prev_q->next = q->next;
953 free (q);
954 }
955 else
956 prev_q = q;
957 }
958 /* Either delete P or advance past it. */
959 if (delete_p)
960 {
961 if (prev_p)
962 prev_p->next = p->next;
963 else
964 temp_slots = p->next;
965 }
966 else
967 prev_p = p;
968 }
969 }
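/* Editor's note, a worked example of the adjacency test above (illustrative
   numbers only): if slot P has base_offset 16 and full_size 16, and slot Q
   has base_offset 32, then P->base_offset + P->full_size == Q->base_offset,
   so Q is merged into P, leaving one free BLKmode slot with base_offset 16
   and full_size 32.  The symmetric test handles Q preceding P.  */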
970 \f
971 /* Find the temp slot corresponding to the object at address X. */
972
973 static struct temp_slot *
974 find_temp_slot_from_address (x)
975 rtx x;
976 {
977 struct temp_slot *p;
978 rtx next;
979
980 for (p = temp_slots; p; p = p->next)
981 {
982 if (! p->in_use)
983 continue;
984
985 else if (XEXP (p->slot, 0) == x
986 || p->address == x
987 || (GET_CODE (x) == PLUS
988 && XEXP (x, 0) == virtual_stack_vars_rtx
989 && GET_CODE (XEXP (x, 1)) == CONST_INT
990 && INTVAL (XEXP (x, 1)) >= p->base_offset
991 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
992 return p;
993
994 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
995 for (next = p->address; next; next = XEXP (next, 1))
996 if (XEXP (next, 0) == x)
997 return p;
998 }
999
1000 /* If we have a sum involving a register, see if it points to a temp
1001 slot. */
1002 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
1003 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
1004 return p;
1005 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
1006 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1007 return p;
1008
1009 return 0;
1010 }
1011
1012 /* Indicate that NEW is an alternate way of referring to the temp slot
1013 that previously was known by OLD. */
1014
1015 void
1016 update_temp_slot_address (old, new)
1017 rtx old, new;
1018 {
1019 struct temp_slot *p;
1020
1021 if (rtx_equal_p (old, new))
1022 return;
1023
1024 p = find_temp_slot_from_address (old);
1025
1026 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1027 is a register, see if one operand of the PLUS is a temporary
1028 location. If so, NEW points into it. Otherwise, if both OLD and
1029 NEW are a PLUS and there is a register in common between them,
1030 try a recursive call on those values. */
1031 if (p == 0)
1032 {
1033 if (GET_CODE (old) != PLUS)
1034 return;
1035
1036 if (GET_CODE (new) == REG)
1037 {
1038 update_temp_slot_address (XEXP (old, 0), new);
1039 update_temp_slot_address (XEXP (old, 1), new);
1040 return;
1041 }
1042 else if (GET_CODE (new) != PLUS)
1043 return;
1044
1045 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1046 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1047 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1048 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1049 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1050 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1051 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1052 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1053
1054 return;
1055 }
1056
1057 /* Otherwise add an alias for the temp's address. */
1058 else if (p->address == 0)
1059 p->address = new;
1060 else
1061 {
1062 if (GET_CODE (p->address) != EXPR_LIST)
1063 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1064
1065 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1066 }
1067 }
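/* Editor's note, an illustrative example of the recursion above (the
   register numbers are made up): with OLD = (plus (reg 100) (const_int 8))
   and NEW = (plus (reg 200) (const_int 8)), the two const_int operands
   match, so we recurse on (reg 100) and (reg 200); if (reg 100) is a
   recorded address of a temporary slot, (reg 200) is then added as an
   alias for the same slot.  */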
1068
1069 /* If X could be a reference to a temporary slot, mark the fact that its
1070 address was taken. */
1071
1072 void
1073 mark_temp_addr_taken (x)
1074 rtx x;
1075 {
1076 struct temp_slot *p;
1077
1078 if (x == 0)
1079 return;
1080
1081 /* If X is not in memory or is at a constant address, it cannot be in
1082 a temporary slot. */
1083 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1084 return;
1085
1086 p = find_temp_slot_from_address (XEXP (x, 0));
1087 if (p != 0)
1088 p->addr_taken = 1;
1089 }
1090
1091 /* If X could be a reference to a temporary slot, mark that slot as
1092 belonging to the level one higher than the current level. If X
1093 matched one of our slots, just mark that one. Otherwise, we can't
1094 easily predict which it is, so upgrade all of them. Kept slots
1095 need not be touched.
1096
1097 This is called when an ({...}) construct occurs and a statement
1098 returns a value in memory. */
1099
1100 void
1101 preserve_temp_slots (x)
1102 rtx x;
1103 {
1104 struct temp_slot *p = 0;
1105
1106 /* If there is no result, we still might have some objects whose addresses
1107 were taken, so we need to make sure they stay around. */
1108 if (x == 0)
1109 {
1110 for (p = temp_slots; p; p = p->next)
1111 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1112 p->level--;
1113
1114 return;
1115 }
1116
1117 /* If X is a register that is being used as a pointer, see if we have
1118 a temporary slot we know it points to. To be consistent with
1119 the code below, we really should preserve all non-kept slots
1120 if we can't find a match, but that seems to be much too costly. */
1121 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1122 p = find_temp_slot_from_address (x);
1123
1124 /* If X is not in memory or is at a constant address, it cannot be in
1125 a temporary slot, but it can contain something whose address was
1126 taken. */
1127 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1128 {
1129 for (p = temp_slots; p; p = p->next)
1130 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1131 p->level--;
1132
1133 return;
1134 }
1135
1136 /* First see if we can find a match. */
1137 if (p == 0)
1138 p = find_temp_slot_from_address (XEXP (x, 0));
1139
1140 if (p != 0)
1141 {
1142 /* Move everything at our level whose address was taken to our new
1143 level in case we used its address. */
1144 struct temp_slot *q;
1145
1146 if (p->level == temp_slot_level)
1147 {
1148 for (q = temp_slots; q; q = q->next)
1149 if (q != p && q->addr_taken && q->level == p->level)
1150 q->level--;
1151
1152 p->level--;
1153 p->addr_taken = 0;
1154 }
1155 return;
1156 }
1157
1158 /* Otherwise, preserve all non-kept slots at this level. */
1159 for (p = temp_slots; p; p = p->next)
1160 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1161 p->level--;
1162 }
1163
1164 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1165 with that RTL_EXPR, promote it into a temporary slot at the present
1166 level so it will not be freed when we free slots made in the
1167 RTL_EXPR. */
1168
1169 void
1170 preserve_rtl_expr_result (x)
1171 rtx x;
1172 {
1173 struct temp_slot *p;
1174
1175 /* If X is not in memory or is at a constant address, it cannot be in
1176 a temporary slot. */
1177 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1178 return;
1179
1180 /* If we can find a match, move it to our level unless it is already at
1181 an upper level. */
1182 p = find_temp_slot_from_address (XEXP (x, 0));
1183 if (p != 0)
1184 {
1185 p->level = MIN (p->level, temp_slot_level);
1186 p->rtl_expr = 0;
1187 }
1188
1189 return;
1190 }
1191
1192 /* Free all temporaries used so far. This is normally called at the end
1193 of generating code for a statement. Don't free any temporaries
1194 currently in use for an RTL_EXPR that hasn't yet been emitted.
1195 We could eventually do better than this since it can be reused while
1196 generating the same RTL_EXPR, but this is complex and probably not
1197 worthwhile. */
1198
1199 void
1200 free_temp_slots ()
1201 {
1202 struct temp_slot *p;
1203
1204 for (p = temp_slots; p; p = p->next)
1205 if (p->in_use && p->level == temp_slot_level && ! p->keep
1206 && p->rtl_expr == 0)
1207 p->in_use = 0;
1208
1209 combine_temp_slots ();
1210 }
1211
1212 /* Free all temporary slots used in T, an RTL_EXPR node. */
1213
1214 void
1215 free_temps_for_rtl_expr (t)
1216 tree t;
1217 {
1218 struct temp_slot *p;
1219
1220 for (p = temp_slots; p; p = p->next)
1221 if (p->rtl_expr == t)
1222 {
1223 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1224 needs to be preserved. This can happen if a temporary in
1225 the RTL_EXPR was addressed; preserve_temp_slots will move
1226 the temporary into a higher level. */
1227 if (temp_slot_level <= p->level)
1228 p->in_use = 0;
1229 else
1230 p->rtl_expr = NULL_TREE;
1231 }
1232
1233 combine_temp_slots ();
1234 }
1235
1236 /* Mark all temporaries ever allocated in this function as not suitable
1237 for reuse until the current level is exited. */
1238
1239 void
1240 mark_all_temps_used ()
1241 {
1242 struct temp_slot *p;
1243
1244 for (p = temp_slots; p; p = p->next)
1245 {
1246 p->in_use = p->keep = 1;
1247 p->level = MIN (p->level, temp_slot_level);
1248 }
1249 }
1250
1251 /* Push deeper into the nesting level for stack temporaries. */
1252
1253 void
1254 push_temp_slots ()
1255 {
1256 temp_slot_level++;
1257 }
1258
1259 /* Likewise, but save the new level as the place to allocate variables
1260 for blocks. */
1261
1262 #if 0
1263 void
1264 push_temp_slots_for_block ()
1265 {
1266 push_temp_slots ();
1267
1268 var_temp_slot_level = temp_slot_level;
1269 }
1270
1271 /* Likewise, but save the new level as the place to allocate temporaries
1272 for TARGET_EXPRs. */
1273
1274 void
1275 push_temp_slots_for_target ()
1276 {
1277 push_temp_slots ();
1278
1279 target_temp_slot_level = temp_slot_level;
1280 }
1281
1282 /* Set and get the value of target_temp_slot_level. The only
1283 permitted use of these functions is to save and restore this value. */
1284
1285 int
1286 get_target_temp_slot_level ()
1287 {
1288 return target_temp_slot_level;
1289 }
1290
1291 void
1292 set_target_temp_slot_level (level)
1293 int level;
1294 {
1295 target_temp_slot_level = level;
1296 }
1297 #endif
1298
1299 /* Pop a temporary nesting level. All slots in use in the current level
1300 are freed. */
1301
1302 void
1303 pop_temp_slots ()
1304 {
1305 struct temp_slot *p;
1306
1307 for (p = temp_slots; p; p = p->next)
1308 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1309 p->in_use = 0;
1310
1311 combine_temp_slots ();
1312
1313 temp_slot_level--;
1314 }
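/* Editor's note, a hedged sketch (not taken from this file) of the usual
   nesting discipline around an expression expansion; the helper name
   expand_one_stmt_expr and its argument are hypothetical and used only
   for illustration.  */
#if 0
static rtx
expand_one_stmt_expr (exp)
     tree exp;
{
  rtx result;

  push_temp_slots ();
  result = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  /* Keep whichever temporary may hold the result, free the rest made
     at this level, and return to the enclosing level.  */
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return result;
}
#endif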
1315
1316 /* Initialize temporary slots. */
1317
1318 void
1319 init_temp_slots ()
1320 {
1321 /* We have not allocated any temporaries yet. */
1322 temp_slots = 0;
1323 temp_slot_level = 0;
1324 var_temp_slot_level = 0;
1325 target_temp_slot_level = 0;
1326 }
1327 \f
1328 /* Retroactively move an auto variable from a register to a stack slot.
1329 This is done when an address-reference to the variable is seen. */
1330
1331 void
1332 put_var_into_stack (decl)
1333 tree decl;
1334 {
1335 register rtx reg;
1336 enum machine_mode promoted_mode, decl_mode;
1337 struct function *function = 0;
1338 tree context;
1339 int can_use_addressof;
1340 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1341 int usedp = (TREE_USED (decl)
1342 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1343
1344 context = decl_function_context (decl);
1345
1346 /* Get the current rtl used for this object and its original mode. */
1347 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1348
1349 /* No need to do anything if decl has no rtx yet
1350 since in that case caller is setting TREE_ADDRESSABLE
1351 and a stack slot will be assigned when the rtl is made. */
1352 if (reg == 0)
1353 return;
1354
1355 /* Get the declared mode for this object. */
1356 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1357 : DECL_MODE (decl));
1358 /* Get the mode it's actually stored in. */
1359 promoted_mode = GET_MODE (reg);
1360
1361 /* If this variable comes from an outer function,
1362 find that function's saved context. */
1363 if (context != current_function_decl && context != inline_function_decl)
1364 for (function = outer_function_chain; function; function = function->next)
1365 if (function->decl == context)
1366 break;
1367
1368 /* If this is a variable-size object with a pseudo to address it,
1369 put that pseudo into the stack, if the var is nonlocal. */
1370 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1371 && GET_CODE (reg) == MEM
1372 && GET_CODE (XEXP (reg, 0)) == REG
1373 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1374 {
1375 reg = XEXP (reg, 0);
1376 decl_mode = promoted_mode = GET_MODE (reg);
1377 }
1378
1379 can_use_addressof
1380 = (function == 0
1381 && optimize > 0
1382 /* FIXME make it work for promoted modes too */
1383 && decl_mode == promoted_mode
1384 #ifdef NON_SAVING_SETJMP
1385 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1386 #endif
1387 );
1388
1389 /* If we can't use ADDRESSOF, make sure we see through one we already
1390 generated. */
1391 if (! can_use_addressof && GET_CODE (reg) == MEM
1392 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1393 reg = XEXP (XEXP (reg, 0), 0);
1394
1395 /* Now we should have a value that resides in one or more pseudo regs. */
1396
1397 if (GET_CODE (reg) == REG)
1398 {
1399 /* If this variable lives in the current function and we don't need
1400 to put things in the stack for the sake of setjmp, try to keep it
1401 in a register until we know we actually need the address. */
1402 if (can_use_addressof)
1403 gen_mem_addressof (reg, decl);
1404 else
1405 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1406 decl_mode, volatilep, 0, usedp, 0);
1407 }
1408 else if (GET_CODE (reg) == CONCAT)
1409 {
1410 /* A CONCAT contains two pseudos; put them both in the stack.
1411 We do it so they end up consecutive.
1412 We fixup references to the parts only after we fixup references
1413 to the whole CONCAT, lest we do double fixups for the latter
1414 references. */
1415 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1416 tree part_type = type_for_mode (part_mode, 0);
1417 rtx lopart = XEXP (reg, 0);
1418 rtx hipart = XEXP (reg, 1);
1419 #ifdef FRAME_GROWS_DOWNWARD
1420 /* Since part 0 should have a lower address, do it second. */
1421 put_reg_into_stack (function, hipart, part_type, part_mode,
1422 part_mode, volatilep, 0, 0, 0);
1423 put_reg_into_stack (function, lopart, part_type, part_mode,
1424 part_mode, volatilep, 0, 0, 0);
1425 #else
1426 put_reg_into_stack (function, lopart, part_type, part_mode,
1427 part_mode, volatilep, 0, 0, 0);
1428 put_reg_into_stack (function, hipart, part_type, part_mode,
1429 part_mode, volatilep, 0, 0, 0);
1430 #endif
1431
1432 /* Change the CONCAT into a combined MEM for both parts. */
1433 PUT_CODE (reg, MEM);
1434 set_mem_attributes (reg, decl, 1);
1435
1436 /* The two parts are in memory order already.
1437 Use the lower part's address as ours. */
1438 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1439 /* Prevent sharing of rtl that might lose. */
1440 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1441 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1442 if (usedp)
1443 {
1444 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1445 promoted_mode, 0);
1446 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1447 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1448 }
1449 }
1450 else
1451 return;
1452
1453 if (current_function_check_memory_usage)
1454 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1455 XEXP (reg, 0), Pmode,
1456 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1457 TYPE_MODE (sizetype),
1458 GEN_INT (MEMORY_USE_RW),
1459 TYPE_MODE (integer_type_node));
1460 }
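/* Editor's note, an illustrative source-level example (not part of the
   original comments): for user code such as

       int f () { int i = 0; g (&i); return i; }

   the front end may first give `i' a pseudo register; on seeing `&i' it
   learns the address is needed and calls put_var_into_stack on the decl
   for `i', which rewrites DECL_RTL to a stack MEM (or an ADDRESSOF when
   optimizing) and queues the already-emitted references for fixup.  */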
1461
1462 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1463 into the stack frame of FUNCTION (0 means the current function).
1464 DECL_MODE is the machine mode of the user-level data type.
1465 PROMOTED_MODE is the machine mode of the register.
1466 VOLATILE_P is nonzero if this is for a "volatile" decl.
1467 USED_P is nonzero if this reg might have already been used in an insn. */
1468
1469 static void
1470 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1471 original_regno, used_p, ht)
1472 struct function *function;
1473 rtx reg;
1474 tree type;
1475 enum machine_mode promoted_mode, decl_mode;
1476 int volatile_p;
1477 unsigned int original_regno;
1478 int used_p;
1479 struct hash_table *ht;
1480 {
1481 struct function *func = function ? function : cfun;
1482 rtx new = 0;
1483 unsigned int regno = original_regno;
1484
1485 if (regno == 0)
1486 regno = REGNO (reg);
1487
1488 if (regno < func->x_max_parm_reg)
1489 new = func->x_parm_reg_stack_loc[regno];
1490
1491 if (new == 0)
1492 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1493
1494 PUT_CODE (reg, MEM);
1495 PUT_MODE (reg, decl_mode);
1496 XEXP (reg, 0) = XEXP (new, 0);
1497 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1498 MEM_VOLATILE_P (reg) = volatile_p;
1499
1500 /* If this is a memory ref that contains aggregate components,
1501 mark it as such for cse and loop optimize. If we are reusing a
1502 previously generated stack slot, then we need to copy the bit in
1503 case it was set for other reasons. For instance, it is set for
1504 __builtin_va_alist. */
1505 if (type)
1506 {
1507 MEM_SET_IN_STRUCT_P (reg,
1508 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1509 MEM_ALIAS_SET (reg) = get_alias_set (type);
1510 }
1511 if (used_p)
1512 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1513 }
1514
1515 /* Make sure that all refs to the variable, previously made
1516 when it was a register, are fixed up to be valid again.
1517 See function above for meaning of arguments. */
1518 static void
1519 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1520 struct function *function;
1521 rtx reg;
1522 tree type;
1523 enum machine_mode promoted_mode;
1524 struct hash_table *ht;
1525 {
1526 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1527
1528 if (function != 0)
1529 {
1530 struct var_refs_queue *temp;
1531
1532 temp
1533 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1534 temp->modified = reg;
1535 temp->promoted_mode = promoted_mode;
1536 temp->unsignedp = unsigned_p;
1537 temp->next = function->fixup_var_refs_queue;
1538 function->fixup_var_refs_queue = temp;
1539 }
1540 else
1541 /* Variable is local; fix it up now. */
1542 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1543 }
1544 \f
1545 static void
1546 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1547 rtx var;
1548 enum machine_mode promoted_mode;
1549 int unsignedp;
1550 struct hash_table *ht;
1551 {
1552 tree pending;
1553 rtx first_insn = get_insns ();
1554 struct sequence_stack *stack = seq_stack;
1555 tree rtl_exps = rtl_expr_chain;
1556 rtx insn;
1557
1558 /* Must scan all insns for stack-refs that exceed the limit. */
1559 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1560 stack == 0, ht);
1561 /* If there's a hash table, it must record all uses of VAR. */
1562 if (ht)
1563 return;
1564
1565 /* Scan all pending sequences too. */
1566 for (; stack; stack = stack->next)
1567 {
1568 push_to_sequence (stack->first);
1569 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1570 stack->first, stack->next != 0, 0);
1571 /* Update remembered end of sequence
1572 in case we added an insn at the end. */
1573 stack->last = get_last_insn ();
1574 end_sequence ();
1575 }
1576
1577 /* Scan all waiting RTL_EXPRs too. */
1578 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1579 {
1580 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1581 if (seq != const0_rtx && seq != 0)
1582 {
1583 push_to_sequence (seq);
1584 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1585 0);
1586 end_sequence ();
1587 }
1588 }
1589
1590 /* Scan the catch clauses for exception handling too. */
1591 push_to_full_sequence (catch_clauses, catch_clauses_last);
1592 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1593 0, 0);
1594 end_full_sequence (&catch_clauses, &catch_clauses_last);
1595
1596 /* Scan sequences saved in CALL_PLACEHOLDERS too. */
1597 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1598 {
1599 if (GET_CODE (insn) == CALL_INSN
1600 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1601 {
1602 int i;
1603
1604 /* Look at the Normal call, sibling call and tail recursion
1605 sequences attached to the CALL_PLACEHOLDER. */
1606 for (i = 0; i < 3; i++)
1607 {
1608 rtx seq = XEXP (PATTERN (insn), i);
1609 if (seq)
1610 {
1611 push_to_sequence (seq);
1612 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1613 seq, 0, 0);
1614 XEXP (PATTERN (insn), i) = get_insns ();
1615 end_sequence ();
1616 }
1617 }
1618 }
1619 }
1620 }
1621 \f
1622 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries and X is
1623 some part of an insn. Return a struct fixup_replacement whose OLD
1624 value is equal to X. Allocate a new structure if no such entry exists. */
1625
1626 static struct fixup_replacement *
1627 find_fixup_replacement (replacements, x)
1628 struct fixup_replacement **replacements;
1629 rtx x;
1630 {
1631 struct fixup_replacement *p;
1632
1633 /* See if we have already replaced this. */
1634 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1635 ;
1636
1637 if (p == 0)
1638 {
1639 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1640 p->old = x;
1641 p->new = 0;
1642 p->next = *replacements;
1643 *replacements = p;
1644 }
1645
1646 return p;
1647 }
1648
1649 /* Scan the insn-chain starting with INSN for refs to VAR
1650 and fix them up. TOPLEVEL is nonzero if this chain is the
1651 main chain of insns for the current function. */
1652
1653 static void
1654 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1655 rtx var;
1656 enum machine_mode promoted_mode;
1657 int unsignedp;
1658 rtx insn;
1659 int toplevel;
1660 struct hash_table *ht;
1661 {
1662 rtx call_dest = 0;
1663 rtx insn_list = NULL_RTX;
1664
1665 /* If we already know which INSNs reference VAR there's no need
1666 to walk the entire instruction chain. */
1667 if (ht)
1668 {
1669 insn_list = ((struct insns_for_mem_entry *)
1670 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1671 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1672 insn_list = XEXP (insn_list, 1);
1673 }
1674
1675 while (insn)
1676 {
1677 rtx next = NEXT_INSN (insn);
1678 rtx set, prev, prev_set;
1679 rtx note;
1680
1681 if (INSN_P (insn))
1682 {
1683 /* Remember the notes in case we delete the insn. */
1684 note = REG_NOTES (insn);
1685
1686 /* If this is a CLOBBER of VAR, delete it.
1687
1688 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1689 and REG_RETVAL notes too. */
1690 if (GET_CODE (PATTERN (insn)) == CLOBBER
1691 && (XEXP (PATTERN (insn), 0) == var
1692 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1693 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1694 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1695 {
1696 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1697 /* The REG_LIBCALL note will go away since we are going to
1698 turn INSN into a NOTE, so just delete the
1699 corresponding REG_RETVAL note. */
1700 remove_note (XEXP (note, 0),
1701 find_reg_note (XEXP (note, 0), REG_RETVAL,
1702 NULL_RTX));
1703
1704 /* In unoptimized compilation, we shouldn't call delete_insn
1705 except in jump.c doing warnings. */
1706 PUT_CODE (insn, NOTE);
1707 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1708 NOTE_SOURCE_FILE (insn) = 0;
1709 }
1710
1711 /* The insn to load VAR from a home in the arglist
1712 is now a no-op. When we see it, just delete it.
1713 Similarly if this is storing VAR from a register from which
1714 it was loaded in the previous insn. This will occur
1715 when an ADDRESSOF was made for an arglist slot. */
1716 else if (toplevel
1717 && (set = single_set (insn)) != 0
1718 && SET_DEST (set) == var
1719 /* If this represents the result of an insn group,
1720 don't delete the insn. */
1721 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1722 && (rtx_equal_p (SET_SRC (set), var)
1723 || (GET_CODE (SET_SRC (set)) == REG
1724 && (prev = prev_nonnote_insn (insn)) != 0
1725 && (prev_set = single_set (prev)) != 0
1726 && SET_DEST (prev_set) == SET_SRC (set)
1727 && rtx_equal_p (SET_SRC (prev_set), var))))
1728 {
1729 /* In unoptimized compilation, we shouldn't call delete_insn
1730 except in jump.c doing warnings. */
1731 PUT_CODE (insn, NOTE);
1732 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1733 NOTE_SOURCE_FILE (insn) = 0;
1734 if (insn == last_parm_insn)
1735 last_parm_insn = PREV_INSN (next);
1736 }
1737 else
1738 {
1739 struct fixup_replacement *replacements = 0;
1740 rtx next_insn = NEXT_INSN (insn);
1741
1742 if (SMALL_REGISTER_CLASSES)
1743 {
1744 /* If the insn that copies the results of a CALL_INSN
1745 into a pseudo now references VAR, we have to use an
1746 intermediate pseudo since we want the life of the
1747 return value register to be only a single insn.
1748
1749 If we don't use an intermediate pseudo, such things as
1750 address computations to make the address of VAR valid
1751 if it is not can be placed between the CALL_INSN and INSN.
1752
1753 To make sure this doesn't happen, we record the destination
1754 of the CALL_INSN and see if the next insn uses both that
1755 and VAR. */
1756
1757 if (call_dest != 0 && GET_CODE (insn) == INSN
1758 && reg_mentioned_p (var, PATTERN (insn))
1759 && reg_mentioned_p (call_dest, PATTERN (insn)))
1760 {
1761 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1762
1763 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1764
1765 PATTERN (insn) = replace_rtx (PATTERN (insn),
1766 call_dest, temp);
1767 }
1768
1769 if (GET_CODE (insn) == CALL_INSN
1770 && GET_CODE (PATTERN (insn)) == SET)
1771 call_dest = SET_DEST (PATTERN (insn));
1772 else if (GET_CODE (insn) == CALL_INSN
1773 && GET_CODE (PATTERN (insn)) == PARALLEL
1774 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1775 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1776 else
1777 call_dest = 0;
1778 }
1779
1780 /* See if we have to do anything to INSN now that VAR is in
1781 memory. If it needs to be loaded into a pseudo, use a single
1782 pseudo for the entire insn in case there is a MATCH_DUP
1783 between two operands. We pass a pointer to the head of
1784 a list of struct fixup_replacements. If fixup_var_refs_1
1785 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1786 it will record them in this list.
1787
1788 If it allocated a pseudo for any replacement, we copy into
1789 it here. */
1790
1791 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1792 &replacements);
1793
1794 /* If this is last_parm_insn, and any instructions were output
1795 after it to fix it up, then we must set last_parm_insn to
1796 the last such instruction emitted. */
1797 if (insn == last_parm_insn)
1798 last_parm_insn = PREV_INSN (next_insn);
1799
1800 while (replacements)
1801 {
1802 if (GET_CODE (replacements->new) == REG)
1803 {
1804 rtx insert_before;
1805 rtx seq;
1806
1807 /* OLD might be a (subreg (mem)). */
1808 if (GET_CODE (replacements->old) == SUBREG)
1809 replacements->old
1810 = fixup_memory_subreg (replacements->old, insn, 0);
1811 else
1812 replacements->old
1813 = fixup_stack_1 (replacements->old, insn);
1814
1815 insert_before = insn;
1816
1817 /* If we are changing the mode, do a conversion.
1818 This might be wasteful, but combine.c will
1819 eliminate much of the waste. */
1820
1821 if (GET_MODE (replacements->new)
1822 != GET_MODE (replacements->old))
1823 {
1824 start_sequence ();
1825 convert_move (replacements->new,
1826 replacements->old, unsignedp);
1827 seq = gen_sequence ();
1828 end_sequence ();
1829 }
1830 else
1831 seq = gen_move_insn (replacements->new,
1832 replacements->old);
1833
1834 emit_insn_before (seq, insert_before);
1835 }
1836
1837 replacements = replacements->next;
1838 }
1839 }
1840
1841 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1842 But don't touch other insns referred to by reg-notes;
1843 we will get them elsewhere. */
1844 while (note)
1845 {
1846 if (GET_CODE (note) != INSN_LIST)
1847 XEXP (note, 0)
1848 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1849 note = XEXP (note, 1);
1850 }
1851 }
1852
1853 if (!ht)
1854 insn = next;
1855 else if (insn_list)
1856 {
1857 insn = XEXP (insn_list, 0);
1858 insn_list = XEXP (insn_list, 1);
1859 }
1860 else
1861 insn = NULL_RTX;
1862 }
1863 }
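/* One of the deletions in the loop above removes an insn that stores VAR
   from a register which the immediately preceding insn loaded from VAR,
   i.e. a load/store pair that cancels out.  A standalone sketch of that
   test over a simplified three-address form; the struct and field names
   below are invented for illustration and do not model real insns:  */

struct simple_insn { int dest; int src; };	/* represents dest = src */

static int
redundant_store_p (const struct simple_insn *prev,
		   const struct simple_insn *insn, int var)
{
  /* INSN: var = r  is redundant when it copies VAR to itself, or when
     the previous insn was  r = var.  */
  return insn->dest == var
	 && (insn->src == var
	     || (prev != 0
		 && prev->dest == insn->src
		 && prev->src == var));
}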
1864 \f
1865 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1866 See if the rtx expression at *LOC in INSN needs to be changed.
1867
1868 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1869 contain a list of original rtx's and replacements. If we find that we need
1870 to modify this insn by replacing a memory reference with a pseudo or by
1871 making a new MEM to implement a SUBREG, we consult that list to see if
1872 we have already chosen a replacement. If none has already been allocated,
1873 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1874 or the SUBREG, as appropriate, to the pseudo. */
1875
1876 static void
1877 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1878 register rtx var;
1879 enum machine_mode promoted_mode;
1880 register rtx *loc;
1881 rtx insn;
1882 struct fixup_replacement **replacements;
1883 {
1884 register int i;
1885 register rtx x = *loc;
1886 RTX_CODE code = GET_CODE (x);
1887 register const char *fmt;
1888 register rtx tem, tem1;
1889 struct fixup_replacement *replacement;
1890
1891 switch (code)
1892 {
1893 case ADDRESSOF:
1894 if (XEXP (x, 0) == var)
1895 {
1896 /* Prevent sharing of rtl that might lose. */
1897 rtx sub = copy_rtx (XEXP (var, 0));
1898
1899 if (! validate_change (insn, loc, sub, 0))
1900 {
1901 rtx y = gen_reg_rtx (GET_MODE (sub));
1902 rtx seq, new_insn;
1903
1904 /* We should be able to replace with a register or all is lost.
1905 Note that we can't use validate_change to verify this, since
1906 it would not replace all the dups simultaneously. */
1907 if (! validate_replace_rtx (*loc, y, insn))
1908 abort ();
1909
1910 /* Careful! First try to recognize a direct move of the
1911 value, mimicking how things are done in gen_reload wrt
1912 PLUS. Consider what happens when insn is a conditional
1913 move instruction and addsi3 clobbers flags. */
1914
1915 start_sequence ();
1916 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1917 seq = gen_sequence ();
1918 end_sequence ();
1919
1920 if (recog_memoized (new_insn) < 0)
1921 {
1922 /* That failed. Fall back on force_operand and hope. */
1923
1924 start_sequence ();
1925 force_operand (sub, y);
1926 seq = gen_sequence ();
1927 end_sequence ();
1928 }
1929
1930 #ifdef HAVE_cc0
1931 /* Don't separate setter from user. */
1932 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1933 insn = PREV_INSN (insn);
1934 #endif
1935
1936 emit_insn_before (seq, insn);
1937 }
1938 }
1939 return;
1940
1941 case MEM:
1942 if (var == x)
1943 {
1944 /* If we already have a replacement, use it. Otherwise,
1945 try to fix up this address in case it is invalid. */
1946
1947 replacement = find_fixup_replacement (replacements, var);
1948 if (replacement->new)
1949 {
1950 *loc = replacement->new;
1951 return;
1952 }
1953
1954 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1955
1956 /* Unless we are forcing memory to register or we changed the mode,
1957 we can leave things the way they are if the insn is valid. */
1958
1959 INSN_CODE (insn) = -1;
1960 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1961 && recog_memoized (insn) >= 0)
1962 return;
1963
1964 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1965 return;
1966 }
1967
1968 /* If X contains VAR, we need to unshare it here so that we update
1969 each occurrence separately. But all identical MEMs in one insn
1970 must be replaced with the same rtx because of the possibility of
1971 MATCH_DUPs. */
1972
1973 if (reg_mentioned_p (var, x))
1974 {
1975 replacement = find_fixup_replacement (replacements, x);
1976 if (replacement->new == 0)
1977 replacement->new = copy_most_rtx (x, var);
1978
1979 *loc = x = replacement->new;
1980 }
1981 break;
1982
1983 case REG:
1984 case CC0:
1985 case PC:
1986 case CONST_INT:
1987 case CONST:
1988 case SYMBOL_REF:
1989 case LABEL_REF:
1990 case CONST_DOUBLE:
1991 return;
1992
1993 case SIGN_EXTRACT:
1994 case ZERO_EXTRACT:
1995 /* Note that in some cases those types of expressions are altered
1996 by optimize_bit_field, and do not survive to get here. */
1997 if (XEXP (x, 0) == var
1998 || (GET_CODE (XEXP (x, 0)) == SUBREG
1999 && SUBREG_REG (XEXP (x, 0)) == var))
2000 {
2001 /* Get TEM as a valid MEM in the mode presently in the insn.
2002
2003 We don't worry about the possibility of MATCH_DUP here; it
2004 is highly unlikely and would be tricky to handle. */
2005
2006 tem = XEXP (x, 0);
2007 if (GET_CODE (tem) == SUBREG)
2008 {
2009 if (GET_MODE_BITSIZE (GET_MODE (tem))
2010 > GET_MODE_BITSIZE (GET_MODE (var)))
2011 {
2012 replacement = find_fixup_replacement (replacements, var);
2013 if (replacement->new == 0)
2014 replacement->new = gen_reg_rtx (GET_MODE (var));
2015 SUBREG_REG (tem) = replacement->new;
2016 }
2017 else
2018 tem = fixup_memory_subreg (tem, insn, 0);
2019 }
2020 else
2021 tem = fixup_stack_1 (tem, insn);
2022
2023 /* Unless we want to load from memory, get TEM into the proper mode
2024 for an extract from memory. This can only be done if the
2025 extract is at a constant position and length. */
2026
2027 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2028 && GET_CODE (XEXP (x, 2)) == CONST_INT
2029 && ! mode_dependent_address_p (XEXP (tem, 0))
2030 && ! MEM_VOLATILE_P (tem))
2031 {
2032 enum machine_mode wanted_mode = VOIDmode;
2033 enum machine_mode is_mode = GET_MODE (tem);
2034 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2035
2036 #ifdef HAVE_extzv
2037 if (GET_CODE (x) == ZERO_EXTRACT)
2038 {
2039 wanted_mode
2040 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2041 if (wanted_mode == VOIDmode)
2042 wanted_mode = word_mode;
2043 }
2044 #endif
2045 #ifdef HAVE_extv
2046 if (GET_CODE (x) == SIGN_EXTRACT)
2047 {
2048 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2049 if (wanted_mode == VOIDmode)
2050 wanted_mode = word_mode;
2051 }
2052 #endif
2053 /* If we have a narrower mode, we can do something. */
2054 if (wanted_mode != VOIDmode
2055 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2056 {
2057 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2058 rtx old_pos = XEXP (x, 2);
2059 rtx newmem;
2060
2061 /* If the bytes and bits are counted differently, we
2062 must adjust the offset. */
2063 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2064 offset = (GET_MODE_SIZE (is_mode)
2065 - GET_MODE_SIZE (wanted_mode) - offset);
2066
2067 pos %= GET_MODE_BITSIZE (wanted_mode);
2068
2069 newmem = gen_rtx_MEM (wanted_mode,
2070 plus_constant (XEXP (tem, 0), offset));
2071 MEM_COPY_ATTRIBUTES (newmem, tem);
2072
2073 /* Make the change and see if the insn remains valid. */
2074 INSN_CODE (insn) = -1;
2075 XEXP (x, 0) = newmem;
2076 XEXP (x, 2) = GEN_INT (pos);
2077
2078 if (recog_memoized (insn) >= 0)
2079 return;
2080
2081 /* Otherwise, restore old position. XEXP (x, 0) will be
2082 restored later. */
2083 XEXP (x, 2) = old_pos;
2084 }
2085 }
2086
2087 /* If we get here, the bitfield extract insn can't accept a memory
2088 reference. Copy the input into a register. */
2089
2090 tem1 = gen_reg_rtx (GET_MODE (tem));
2091 emit_insn_before (gen_move_insn (tem1, tem), insn);
2092 XEXP (x, 0) = tem1;
2093 return;
2094 }
2095 break;
2096
2097 case SUBREG:
2098 if (SUBREG_REG (x) == var)
2099 {
2100 /* If this is a special SUBREG made because VAR was promoted
2101 from a narrower mode, replace it with VAR and call ourselves
2102 recursively, this time saying that the object previously
2103 had its current mode (by virtue of the SUBREG). */
2104
2105 if (SUBREG_PROMOTED_VAR_P (x))
2106 {
2107 *loc = var;
2108 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2109 return;
2110 }
2111
2112 /* If this SUBREG makes VAR wider, it has become a paradoxical
2113 SUBREG with VAR in memory, but these aren't allowed at this
2114 stage of the compilation. So load VAR into a pseudo and take
2115 a SUBREG of that pseudo. */
2116 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2117 {
2118 replacement = find_fixup_replacement (replacements, var);
2119 if (replacement->new == 0)
2120 replacement->new = gen_reg_rtx (GET_MODE (var));
2121 SUBREG_REG (x) = replacement->new;
2122 return;
2123 }
2124
2125 /* See if we have already found a replacement for this SUBREG.
2126 If so, use it. Otherwise, make a MEM and see if the insn
2127 is recognized. If not, or if we should force MEM into a register,
2128 make a pseudo for this SUBREG. */
2129 replacement = find_fixup_replacement (replacements, x);
2130 if (replacement->new)
2131 {
2132 *loc = replacement->new;
2133 return;
2134 }
2135
2136 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2137
2138 INSN_CODE (insn) = -1;
2139 if (! flag_force_mem && recog_memoized (insn) >= 0)
2140 return;
2141
2142 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2143 return;
2144 }
2145 break;
2146
2147 case SET:
2148 /* First do special simplification of bit-field references. */
2149 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2150 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2151 optimize_bit_field (x, insn, NULL_PTR);
2152 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2153 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2154 optimize_bit_field (x, insn, NULL_PTR);
2155
2156 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2157 into a register and then store it back out. */
2158 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2159 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2160 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2161 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2162 > GET_MODE_SIZE (GET_MODE (var))))
2163 {
2164 replacement = find_fixup_replacement (replacements, var);
2165 if (replacement->new == 0)
2166 replacement->new = gen_reg_rtx (GET_MODE (var));
2167
2168 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2169 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2170 }
2171
2172 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2173 insn into a pseudo and store the low part of the pseudo into VAR. */
2174 if (GET_CODE (SET_DEST (x)) == SUBREG
2175 && SUBREG_REG (SET_DEST (x)) == var
2176 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2177 > GET_MODE_SIZE (GET_MODE (var))))
2178 {
2179 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2180 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2181 tem)),
2182 insn);
2183 break;
2184 }
2185
2186 {
2187 rtx dest = SET_DEST (x);
2188 rtx src = SET_SRC (x);
2189 #ifdef HAVE_insv
2190 rtx outerdest = dest;
2191 #endif
2192
2193 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2194 || GET_CODE (dest) == SIGN_EXTRACT
2195 || GET_CODE (dest) == ZERO_EXTRACT)
2196 dest = XEXP (dest, 0);
2197
2198 if (GET_CODE (src) == SUBREG)
2199 src = XEXP (src, 0);
2200
2201 /* If VAR does not appear at the top level of the SET
2202 just scan the lower levels of the tree. */
2203
2204 if (src != var && dest != var)
2205 break;
2206
2207 /* We will need to rerecognize this insn. */
2208 INSN_CODE (insn) = -1;
2209
2210 #ifdef HAVE_insv
2211 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2212 {
2213 /* Since this case will return, ensure we fixup all the
2214 operands here. */
2215 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2216 insn, replacements);
2217 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2218 insn, replacements);
2219 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2220 insn, replacements);
2221
2222 tem = XEXP (outerdest, 0);
2223
2224 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2225 that may appear inside a ZERO_EXTRACT.
2226 This was legitimate when the MEM was a REG. */
2227 if (GET_CODE (tem) == SUBREG
2228 && SUBREG_REG (tem) == var)
2229 tem = fixup_memory_subreg (tem, insn, 0);
2230 else
2231 tem = fixup_stack_1 (tem, insn);
2232
2233 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2234 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2235 && ! mode_dependent_address_p (XEXP (tem, 0))
2236 && ! MEM_VOLATILE_P (tem))
2237 {
2238 enum machine_mode wanted_mode;
2239 enum machine_mode is_mode = GET_MODE (tem);
2240 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2241
2242 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2243 if (wanted_mode == VOIDmode)
2244 wanted_mode = word_mode;
2245
2246 /* If we have a narrower mode, we can do something. */
2247 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2248 {
2249 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2250 rtx old_pos = XEXP (outerdest, 2);
2251 rtx newmem;
2252
2253 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2254 offset = (GET_MODE_SIZE (is_mode)
2255 - GET_MODE_SIZE (wanted_mode) - offset);
2256
2257 pos %= GET_MODE_BITSIZE (wanted_mode);
2258
2259 newmem = gen_rtx_MEM (wanted_mode,
2260 plus_constant (XEXP (tem, 0),
2261 offset));
2262 MEM_COPY_ATTRIBUTES (newmem, tem);
2263
2264 /* Make the change and see if the insn remains valid. */
2265 INSN_CODE (insn) = -1;
2266 XEXP (outerdest, 0) = newmem;
2267 XEXP (outerdest, 2) = GEN_INT (pos);
2268
2269 if (recog_memoized (insn) >= 0)
2270 return;
2271
2272 /* Otherwise, restore old position. XEXP (x, 0) will be
2273 restored later. */
2274 XEXP (outerdest, 2) = old_pos;
2275 }
2276 }
2277
2278 /* If we get here, the bit-field store doesn't allow memory
2279 or isn't located at a constant position. Load the value into
2280 a register, do the store, and put it back into memory. */
2281
2282 tem1 = gen_reg_rtx (GET_MODE (tem));
2283 emit_insn_before (gen_move_insn (tem1, tem), insn);
2284 emit_insn_after (gen_move_insn (tem, tem1), insn);
2285 XEXP (outerdest, 0) = tem1;
2286 return;
2287 }
2288 #endif
2289
2290 /* STRICT_LOW_PART is a no-op on memory references
2291 and it can cause combinations to be unrecognizable,
2292 so eliminate it. */
2293
2294 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2295 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2296
2297 /* A valid insn to copy VAR into or out of a register
2298 must be left alone, to avoid an infinite loop here.
2299 If the reference to VAR is by a subreg, fix that up,
2300 since SUBREG is not valid for a memref.
2301 Also fix up the address of the stack slot.
2302
2303 Note that we must not try to recognize the insn until
2304 after we know that we have valid addresses and no
2305 (subreg (mem ...) ...) constructs, since these interfere
2306 with determining the validity of the insn. */
2307
2308 if ((SET_SRC (x) == var
2309 || (GET_CODE (SET_SRC (x)) == SUBREG
2310 && SUBREG_REG (SET_SRC (x)) == var))
2311 && (GET_CODE (SET_DEST (x)) == REG
2312 || (GET_CODE (SET_DEST (x)) == SUBREG
2313 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2314 && GET_MODE (var) == promoted_mode
2315 && x == single_set (insn))
2316 {
2317 rtx pat;
2318
2319 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2320 if (replacement->new)
2321 SET_SRC (x) = replacement->new;
2322 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2323 SET_SRC (x) = replacement->new
2324 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2325 else
2326 SET_SRC (x) = replacement->new
2327 = fixup_stack_1 (SET_SRC (x), insn);
2328
2329 if (recog_memoized (insn) >= 0)
2330 return;
2331
2332 /* INSN is not valid, but we know that we want to
2333 copy SET_SRC (x) to SET_DEST (x) in some way. So
2334 we generate the move and see whether it requires more
2335 than one insn. If it does, we emit those insns and
2336 delete INSN. Otherwise, we can just replace the pattern
2337 of INSN; we have already verified above that INSN has
2338 no function other than to do X. */
2339
2340 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2341 if (GET_CODE (pat) == SEQUENCE)
2342 {
2343 emit_insn_after (pat, insn);
2344 PUT_CODE (insn, NOTE);
2345 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2346 NOTE_SOURCE_FILE (insn) = 0;
2347 }
2348 else
2349 PATTERN (insn) = pat;
2350
2351 return;
2352 }
2353
2354 if ((SET_DEST (x) == var
2355 || (GET_CODE (SET_DEST (x)) == SUBREG
2356 && SUBREG_REG (SET_DEST (x)) == var))
2357 && (GET_CODE (SET_SRC (x)) == REG
2358 || (GET_CODE (SET_SRC (x)) == SUBREG
2359 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2360 && GET_MODE (var) == promoted_mode
2361 && x == single_set (insn))
2362 {
2363 rtx pat;
2364
2365 if (GET_CODE (SET_DEST (x)) == SUBREG)
2366 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2367 else
2368 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2369
2370 if (recog_memoized (insn) >= 0)
2371 return;
2372
2373 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2374 if (GET_CODE (pat) == SEQUENCE)
2375 {
2376 emit_insn_after (pat, insn);
2377 PUT_CODE (insn, NOTE);
2378 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2379 NOTE_SOURCE_FILE (insn) = 0;
2380 }
2381 else
2382 PATTERN (insn) = pat;
2383
2384 return;
2385 }
2386
2387 /* Otherwise, storing into VAR must be handled specially
2388 by storing into a temporary and copying that into VAR
2389 with a new insn after this one. Note that this case
2390 will be used when storing into a promoted scalar since
2391 the insn will now have different modes on the input
2392 and output and hence will be invalid (except for the case
2393 of setting it to a constant, which does not need any
2394 change if it is valid). We generate extra code in that case,
2395 but combine.c will eliminate it. */
2396
2397 if (dest == var)
2398 {
2399 rtx temp;
2400 rtx fixeddest = SET_DEST (x);
2401
2402 /* A STRICT_LOW_PART around a MEM can be discarded. */
2403 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2404 fixeddest = XEXP (fixeddest, 0);
2405 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2406 if (GET_CODE (fixeddest) == SUBREG)
2407 {
2408 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2409 promoted_mode = GET_MODE (fixeddest);
2410 }
2411 else
2412 fixeddest = fixup_stack_1 (fixeddest, insn);
2413
2414 temp = gen_reg_rtx (promoted_mode);
2415
2416 emit_insn_after (gen_move_insn (fixeddest,
2417 gen_lowpart (GET_MODE (fixeddest),
2418 temp)),
2419 insn);
2420
2421 SET_DEST (x) = temp;
2422 }
2423 }
2424
2425 default:
2426 break;
2427 }
2428
2429 /* Nothing special about this RTX; fix its operands. */
2430
2431 fmt = GET_RTX_FORMAT (code);
2432 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2433 {
2434 if (fmt[i] == 'e')
2435 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2436 else if (fmt[i] == 'E')
2437 {
2438 register int j;
2439 for (j = 0; j < XVECLEN (x, i); j++)
2440 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2441 insn, replacements);
2442 }
2443 }
2444 }
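/* Both the extraction case and the insv case in fixup_var_refs_1 above
   narrow a bit-field reference to a smaller memory mode with the same
   arithmetic: convert the bit position to a byte offset, flip that offset
   when bytes and bits are numbered from opposite ends, and reduce the
   position modulo the narrow mode's width.  A standalone sketch of just
   that arithmetic; 8 stands in for BITS_PER_UNIT, the sizes are byte
   counts, and nothing here touches real machine modes:  */

struct narrowed_ref { long byte_offset; long bit_pos; };

static struct narrowed_ref
narrow_bit_field_ref (long pos, long is_size, long wanted_size,
		      int bytes_and_bits_differ)
{
  struct narrowed_ref r;

  r.byte_offset = pos / 8;
  if (bytes_and_bits_differ)	/* analogue of BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN */
    r.byte_offset = is_size - wanted_size - r.byte_offset;

  r.bit_pos = pos % (wanted_size * 8);
  return r;
}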
2445 \f
2446 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2447 return an rtx (MEM:m1 newaddr) which is equivalent.
2448 If any insns must be emitted to compute NEWADDR, put them before INSN.
2449
2450 UNCRITICAL nonzero means accept paradoxical subregs.
2451 This is used for subregs found inside REG_NOTES. */
2452
2453 static rtx
2454 fixup_memory_subreg (x, insn, uncritical)
2455 rtx x;
2456 rtx insn;
2457 int uncritical;
2458 {
2459 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2460 rtx addr = XEXP (SUBREG_REG (x), 0);
2461 enum machine_mode mode = GET_MODE (x);
2462 rtx result;
2463
2464 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2465 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2466 && ! uncritical)
2467 abort ();
2468
2469 if (BYTES_BIG_ENDIAN)
2470 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2471 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2472 addr = plus_constant (addr, offset);
2473 if (!flag_force_addr && memory_address_p (mode, addr))
2474 /* Shortcut if no insns need be emitted. */
2475 return change_address (SUBREG_REG (x), mode, addr);
2476 start_sequence ();
2477 result = change_address (SUBREG_REG (x), mode, addr);
2478 emit_insn_before (gen_sequence (), insn);
2479 end_sequence ();
2480 return result;
2481 }
2482
2483 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2484 Replace subexpressions of X in place.
2485 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2486 Otherwise return X, with its contents possibly altered.
2487
2488 If any insns must be emitted to compute NEWADDR, put them before INSN.
2489
2490 UNCRITICAL is as in fixup_memory_subreg. */
2491
2492 static rtx
2493 walk_fixup_memory_subreg (x, insn, uncritical)
2494 register rtx x;
2495 rtx insn;
2496 int uncritical;
2497 {
2498 register enum rtx_code code;
2499 register const char *fmt;
2500 register int i;
2501
2502 if (x == 0)
2503 return 0;
2504
2505 code = GET_CODE (x);
2506
2507 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2508 return fixup_memory_subreg (x, insn, uncritical);
2509
2510 /* Nothing special about this RTX; fix its operands. */
2511
2512 fmt = GET_RTX_FORMAT (code);
2513 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2514 {
2515 if (fmt[i] == 'e')
2516 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2517 else if (fmt[i] == 'E')
2518 {
2519 register int j;
2520 for (j = 0; j < XVECLEN (x, i); j++)
2521 XVECEXP (x, i, j)
2522 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2523 }
2524 }
2525 return x;
2526 }
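/* walk_fixup_memory_subreg, like several walkers in this file, drives its
   recursion off GET_RTX_FORMAT: an 'e' in the format string marks a single
   subexpression operand and an 'E' marks a vector of them.  A standalone
   sketch of that traversal shape over a made-up node type; the layout
   below (fixed operand array, one vector per node) is an invention of
   this sketch and is much simpler than real rtl:  */

struct fnode
{
  const char *format;		/* one letter per operand, e.g. "ee" or "E" */
  struct fnode *op[4];		/* operands at 'e' positions */
  struct fnode **vec;		/* the vector operand, if any */
  int vec_len;
};

static struct fnode *
walk_rewrite (struct fnode *x, struct fnode *(*rewrite) (struct fnode *))
{
  int i, j;

  if (x == 0)
    return 0;

  x = rewrite (x);		/* analogue of the SUBREG-of-MEM rewrite */

  for (i = 0; x->format[i] != '\0'; i++)
    {
      if (x->format[i] == 'e')
	x->op[i] = walk_rewrite (x->op[i], rewrite);
      else if (x->format[i] == 'E')
	for (j = 0; j < x->vec_len; j++)
	  x->vec[j] = walk_rewrite (x->vec[j], rewrite);
    }

  return x;
}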
2527 \f
2528 /* For each memory ref within X, if it refers to a stack slot
2529 with an out of range displacement, put the address in a temp register
2530 (emitting new insns before INSN to load these registers)
2531 and alter the memory ref to use that register.
2532 Replace each such MEM rtx with a copy, to avoid clobberage. */
2533
2534 static rtx
2535 fixup_stack_1 (x, insn)
2536 rtx x;
2537 rtx insn;
2538 {
2539 register int i;
2540 register RTX_CODE code = GET_CODE (x);
2541 register const char *fmt;
2542
2543 if (code == MEM)
2544 {
2545 register rtx ad = XEXP (x, 0);
2546 /* If we have the address of a stack slot but it's not valid
2547 (the displacement is too large), compute the sum in a register. */
2548 if (GET_CODE (ad) == PLUS
2549 && GET_CODE (XEXP (ad, 0)) == REG
2550 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2551 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2552 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2553 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2554 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2555 #endif
2556 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2557 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2558 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2559 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2560 {
2561 rtx temp, seq;
2562 if (memory_address_p (GET_MODE (x), ad))
2563 return x;
2564
2565 start_sequence ();
2566 temp = copy_to_reg (ad);
2567 seq = gen_sequence ();
2568 end_sequence ();
2569 emit_insn_before (seq, insn);
2570 return change_address (x, VOIDmode, temp);
2571 }
2572 return x;
2573 }
2574
2575 fmt = GET_RTX_FORMAT (code);
2576 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2577 {
2578 if (fmt[i] == 'e')
2579 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2580 else if (fmt[i] == 'E')
2581 {
2582 register int j;
2583 for (j = 0; j < XVECLEN (x, i); j++)
2584 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2585 }
2586 }
2587 return x;
2588 }
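/* fixup_stack_1 only has to intervene when a stack address of the form
   (PLUS base CONST_INT) has stopped being valid, which on most targets
   means the displacement no longer fits the load/store instruction's
   immediate field.  A standalone sketch of that decision, assuming purely
   for illustration a machine with a signed 13-bit displacement field:  */

static int
displacement_needs_temp (long disp)
{
  const long lo = -(1L << 12);		/* -4096 */
  const long hi = (1L << 12) - 1;	/*  4095 */

  /* Nonzero means the address must first be computed into a register,
     as the code above does with copy_to_reg.  */
  return disp < lo || disp > hi;
}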
2589 \f
2590 /* Optimization: a bit-field instruction whose field
2591 happens to be a byte or halfword in memory
2592 can be changed to a move instruction.
2593
2594 We call here when INSN is an insn to examine or store into a bit-field.
2595 BODY is the SET-rtx to be altered.
2596
2597 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2598 (Currently this is called only from function.c, and EQUIV_MEM
2599 is always 0.) */
2600
2601 static void
2602 optimize_bit_field (body, insn, equiv_mem)
2603 rtx body;
2604 rtx insn;
2605 rtx *equiv_mem;
2606 {
2607 register rtx bitfield;
2608 int destflag;
2609 rtx seq = 0;
2610 enum machine_mode mode;
2611
2612 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2613 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2614 bitfield = SET_DEST (body), destflag = 1;
2615 else
2616 bitfield = SET_SRC (body), destflag = 0;
2617
2618 /* First check that the field being stored has constant size and position
2619 and is in fact a byte or halfword suitably aligned. */
2620
2621 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2622 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2623 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2624 != BLKmode)
2625 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2626 {
2627 register rtx memref = 0;
2628
2629 /* Now check that the containing word is memory, not a register,
2630 and that it is safe to change the machine mode. */
2631
2632 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2633 memref = XEXP (bitfield, 0);
2634 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2635 && equiv_mem != 0)
2636 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2637 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2638 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2639 memref = SUBREG_REG (XEXP (bitfield, 0));
2640 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2641 && equiv_mem != 0
2642 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2643 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2644
2645 if (memref
2646 && ! mode_dependent_address_p (XEXP (memref, 0))
2647 && ! MEM_VOLATILE_P (memref))
2648 {
2649 /* Now adjust the address, first for any subreg'ing
2650 that we are now getting rid of,
2651 and then for which byte of the word is wanted. */
2652
2653 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2654 rtx insns;
2655
2656 /* Adjust OFFSET to count bits from low-address byte. */
2657 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2658 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2659 - offset - INTVAL (XEXP (bitfield, 1)));
2660
2661 /* Adjust OFFSET to count bytes from low-address byte. */
2662 offset /= BITS_PER_UNIT;
2663 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2664 {
2665 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2666 if (BYTES_BIG_ENDIAN)
2667 offset -= (MIN (UNITS_PER_WORD,
2668 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2669 - MIN (UNITS_PER_WORD,
2670 GET_MODE_SIZE (GET_MODE (memref))));
2671 }
2672
2673 start_sequence ();
2674 memref = change_address (memref, mode,
2675 plus_constant (XEXP (memref, 0), offset));
2676 insns = get_insns ();
2677 end_sequence ();
2678 emit_insns_before (insns, insn);
2679
2680 /* Store this memory reference where
2681 we found the bit field reference. */
2682
2683 if (destflag)
2684 {
2685 validate_change (insn, &SET_DEST (body), memref, 1);
2686 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2687 {
2688 rtx src = SET_SRC (body);
2689 while (GET_CODE (src) == SUBREG
2690 && SUBREG_WORD (src) == 0)
2691 src = SUBREG_REG (src);
2692 if (GET_MODE (src) != GET_MODE (memref))
2693 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2694 validate_change (insn, &SET_SRC (body), src, 1);
2695 }
2696 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2697 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2698 /* This shouldn't happen because anything that didn't have
2699 one of these modes should have been converted explicitly
2700 and then referenced through a subreg.
2701 This is so because the original bit-field was
2702 handled by agg_mode and so its tree structure had
2703 the same mode that memref now has. */
2704 abort ();
2705 }
2706 else
2707 {
2708 rtx dest = SET_DEST (body);
2709
2710 while (GET_CODE (dest) == SUBREG
2711 && SUBREG_WORD (dest) == 0
2712 && (GET_MODE_CLASS (GET_MODE (dest))
2713 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2714 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2715 <= UNITS_PER_WORD))
2716 dest = SUBREG_REG (dest);
2717
2718 validate_change (insn, &SET_DEST (body), dest, 1);
2719
2720 if (GET_MODE (dest) == GET_MODE (memref))
2721 validate_change (insn, &SET_SRC (body), memref, 1);
2722 else
2723 {
2724 /* Convert the mem ref to the destination mode. */
2725 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2726
2727 start_sequence ();
2728 convert_move (newreg, memref,
2729 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2730 seq = get_insns ();
2731 end_sequence ();
2732
2733 validate_change (insn, &SET_SRC (body), newreg, 1);
2734 }
2735 }
2736
2737 /* See if we can convert this extraction or insertion into
2738 a simple move insn. We might not be able to do so if this
2739 was, for example, part of a PARALLEL.
2740
2741 If we succeed, write out any needed conversions. If we fail,
2742 it is hard to guess why we failed, so don't do anything
2743 special; just let the optimization be suppressed. */
2744
2745 if (apply_change_group () && seq)
2746 emit_insns_before (seq, insn);
2747 }
2748 }
2749 }
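/* The guard at the top of optimize_bit_field admits only fields whose
   width matches an integer machine mode and whose position is a multiple
   of that width, i.e. fields that occupy exactly one naturally aligned
   byte or halfword within the containing object.  A standalone sketch of
   the same test, restricted for illustration to byte and halfword fields
   and assuming bits are numbered from the low-address end:  */

static int
bit_field_is_aligned_unit (long len, long pos, long *byte_offset)
{
  if ((len != 8 && len != 16) || pos % len != 0)
    return 0;			/* cannot become a plain byte/halfword move */

  *byte_offset = pos / 8;	/* where the equivalent MEM would start */
  return 1;
}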
2750 \f
2751 /* These routines are responsible for converting virtual register references
2752 to the actual hard register references once RTL generation is complete.
2753
2754 The following five variables are used for communication between the
2755 routines. They contain the offsets of the virtual registers from their
2756 respective hard registers. */
2757
2758 static int in_arg_offset;
2759 static int var_offset;
2760 static int dynamic_offset;
2761 static int out_arg_offset;
2762 static int cfa_offset;
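/* Each virtual register stands for some hard register plus one of the
   offsets above; instantiation folds the offset into the constant part of
   the address.  A standalone sketch of that substitution over a
   simplified (base, displacement) address form; the enum values, the
   table arguments and the mapping itself are inventions of this sketch:  */

enum vreg
{
  VIRT_INCOMING_ARGS, VIRT_STACK_VARS, VIRT_STACK_DYNAMIC, VIRT_OUTGOING_ARGS,
  N_VIRT
};

struct simple_addr { int hard_base; long disp; };

static struct simple_addr
instantiate_simple_addr (enum vreg v, long disp,
			 const int hard_base[N_VIRT],
			 const long offset[N_VIRT])
{
  struct simple_addr a;

  a.hard_base = hard_base[v];	/* e.g. the frame or stack pointer */
  a.disp = disp + offset[v];	/* fold in in_arg_offset, var_offset, ... */
  return a;
}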
2763
2764 /* In most machines, the stack pointer register is equivalent to the bottom
2765 of the stack. */
2766
2767 #ifndef STACK_POINTER_OFFSET
2768 #define STACK_POINTER_OFFSET 0
2769 #endif
2770
2771 /* If not defined, pick an appropriate default for the offset of dynamically
2772 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2773 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2774
2775 #ifndef STACK_DYNAMIC_OFFSET
2776
2777 /* The bottom of the stack points to the actual arguments. If
2778 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2779 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2780 stack space for register parameters is not pushed by the caller, but is
2781 rather part of the fixed stack areas and hence not included in
2782 `current_function_outgoing_args_size'. Nevertheless, we must allow
2783 for it when allocating dynamic stack objects. */
2784
2785 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2786 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2787 ((ACCUMULATE_OUTGOING_ARGS \
2788 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2789 + (STACK_POINTER_OFFSET))
2790
2791 #else
2792 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2793 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2794 + (STACK_POINTER_OFFSET))
2795 #endif
2796 #endif
2797
2798 /* On most machines, the CFA coincides with the first incoming parm. */
2799
2800 #ifndef ARG_POINTER_CFA_OFFSET
2801 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2802 #endif
2803
2804
2805 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2806 its address taken. DECL is the decl for the object stored in the
2807 register, for later use if we do need to force REG into the stack.
2808 REG is overwritten by the MEM as in put_reg_into_stack. */
2809
2810 rtx
2811 gen_mem_addressof (reg, decl)
2812 rtx reg;
2813 tree decl;
2814 {
2815 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2816 REGNO (reg), decl);
2817
2818 /* If the original REG was a user-variable, then so is the REG whose
2819 address is being taken. Likewise for unchanging. */
2820 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2821 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2822
2823 PUT_CODE (reg, MEM);
2824 XEXP (reg, 0) = r;
2825 if (decl)
2826 {
2827 tree type = TREE_TYPE (decl);
2828
2829 PUT_MODE (reg, DECL_MODE (decl));
2830 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2831 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2832 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2833
2834 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2835 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2836 }
2837 else
2838 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2839
2840 return reg;
2841 }
2842
2843 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2844
2845 void
2846 flush_addressof (decl)
2847 tree decl;
2848 {
2849 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2850 && DECL_RTL (decl) != 0
2851 && GET_CODE (DECL_RTL (decl)) == MEM
2852 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2853 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2854 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2855 }
2856
2857 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2858
2859 static void
2860 put_addressof_into_stack (r, ht)
2861 rtx r;
2862 struct hash_table *ht;
2863 {
2864 tree decl, type;
2865 int volatile_p, used_p;
2866
2867 rtx reg = XEXP (r, 0);
2868
2869 if (GET_CODE (reg) != REG)
2870 abort ();
2871
2872 decl = ADDRESSOF_DECL (r);
2873 if (decl)
2874 {
2875 type = TREE_TYPE (decl);
2876 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2877 && TREE_THIS_VOLATILE (decl));
2878 used_p = (TREE_USED (decl)
2879 || (TREE_CODE (decl) != SAVE_EXPR
2880 && DECL_INITIAL (decl) != 0));
2881 }
2882 else
2883 {
2884 type = NULL_TREE;
2885 volatile_p = 0;
2886 used_p = 1;
2887 }
2888
2889 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2890 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2891 }
2892
2893 /* List of replacements made below in purge_addressof_1 when creating
2894 bitfield insertions. */
2895 static rtx purge_bitfield_addressof_replacements;
2896
2897 /* List of replacements made below in purge_addressof_1 for patterns
2898 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2899 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2900 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2901 sufficient in complex cases, e.g. when some field values can be
2902 extracted by using a MEM with a narrower mode. */
2903 static rtx purge_addressof_replacements;
2904
2905 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2906 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2907 the stack. If the function returns FALSE then the replacement could not
2908 be made. */
2909
2910 static boolean
2911 purge_addressof_1 (loc, insn, force, store, ht)
2912 rtx *loc;
2913 rtx insn;
2914 int force, store;
2915 struct hash_table *ht;
2916 {
2917 rtx x;
2918 RTX_CODE code;
2919 int i, j;
2920 const char *fmt;
2921 boolean result = true;
2922
2923 /* Re-start here to avoid recursion in common cases. */
2924 restart:
2925
2926 x = *loc;
2927 if (x == 0)
2928 return true;
2929
2930 code = GET_CODE (x);
2931
2932 /* If we don't return in any of the cases below, we will recurse inside
2933 the RTX, which will normally result in any ADDRESSOF being forced into
2934 memory. */
2935 if (code == SET)
2936 {
2937 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2938 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2939 return result;
2940 }
2941
2942 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2943 {
2944 /* We must create a copy of the rtx because it was created by
2945 overwriting a REG rtx which is always shared. */
2946 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2947 rtx insns;
2948
2949 if (validate_change (insn, loc, sub, 0)
2950 || validate_replace_rtx (x, sub, insn))
2951 return true;
2952
2953 start_sequence ();
2954 sub = force_operand (sub, NULL_RTX);
2955 if (! validate_change (insn, loc, sub, 0)
2956 && ! validate_replace_rtx (x, sub, insn))
2957 abort ();
2958
2959 insns = gen_sequence ();
2960 end_sequence ();
2961 emit_insn_before (insns, insn);
2962 return true;
2963 }
2964
2965 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2966 {
2967 rtx sub = XEXP (XEXP (x, 0), 0);
2968 rtx sub2;
2969
2970 if (GET_CODE (sub) == MEM)
2971 {
2972 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2973 MEM_COPY_ATTRIBUTES (sub2, sub);
2974 sub = sub2;
2975 }
2976 else if (GET_CODE (sub) == REG
2977 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2978 ;
2979 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2980 {
2981 int size_x, size_sub;
2982
2983 if (!insn)
2984 {
2985 /* When processing REG_NOTES look at the list of
2986 replacements done on the insn to find the register that X
2987 was replaced by. */
2988 rtx tem;
2989
2990 for (tem = purge_bitfield_addressof_replacements;
2991 tem != NULL_RTX;
2992 tem = XEXP (XEXP (tem, 1), 1))
2993 if (rtx_equal_p (x, XEXP (tem, 0)))
2994 {
2995 *loc = XEXP (XEXP (tem, 1), 0);
2996 return true;
2997 }
2998
2999 /* See comment for purge_addressof_replacements. */
3000 for (tem = purge_addressof_replacements;
3001 tem != NULL_RTX;
3002 tem = XEXP (XEXP (tem, 1), 1))
3003 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3004 {
3005 rtx z = XEXP (XEXP (tem, 1), 0);
3006
3007 if (GET_MODE (x) == GET_MODE (z)
3008 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3009 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3010 abort ();
3011
3012 /* It can happen that the note may speak of things
3013 in a wider (or just different) mode than the
3014 code did. This is especially true of
3015 REG_RETVAL. */
3016
3017 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3018 z = SUBREG_REG (z);
3019
3020 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3021 && (GET_MODE_SIZE (GET_MODE (x))
3022 > GET_MODE_SIZE (GET_MODE (z))))
3023 {
3024 /* This can occur as a result of invalid
3025 pointer casts, e.g. float f; ...
3026 *(long long int *)&f.
3027 ??? We could emit a warning here, but
3028 without a line number that wouldn't be
3029 very helpful. */
3030 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3031 }
3032 else
3033 z = gen_lowpart (GET_MODE (x), z);
3034
3035 *loc = z;
3036 return true;
3037 }
3038
3039 /* Sometimes we may not be able to find the replacement. For
3040 example when the original insn was a MEM in a wider mode,
3041 and the note is part of a sign extension of a narrowed
3042 version of that MEM. Gcc testcase compile/990829-1.c can
3043 generate an example of this situation. Rather than complain,
3044 we return false, which will prompt our caller to remove the
3045 offending note. */
3046 return false;
3047 }
3048
3049 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3050 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3051
3052 /* Don't even consider working with paradoxical subregs,
3053 or the moral equivalent seen here. */
3054 if (size_x <= size_sub
3055 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3056 {
3057 /* Do a bitfield insertion to mirror what would happen
3058 in memory. */
3059
3060 rtx val, seq;
3061
3062 if (store)
3063 {
3064 rtx p = PREV_INSN (insn);
3065
3066 start_sequence ();
3067 val = gen_reg_rtx (GET_MODE (x));
3068 if (! validate_change (insn, loc, val, 0))
3069 {
3070 /* Discard the current sequence and put the
3071 ADDRESSOF on the stack. */
3072 end_sequence ();
3073 goto give_up;
3074 }
3075 seq = gen_sequence ();
3076 end_sequence ();
3077 emit_insn_before (seq, insn);
3078 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3079 insn, ht);
3080
3081 start_sequence ();
3082 store_bit_field (sub, size_x, 0, GET_MODE (x),
3083 val, GET_MODE_SIZE (GET_MODE (sub)),
3084 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3085
3086 /* Make sure to unshare any shared rtl that store_bit_field
3087 might have created. */
3088 unshare_all_rtl_again (get_insns ());
3089
3090 seq = gen_sequence ();
3091 end_sequence ();
3092 p = emit_insn_after (seq, insn);
3093 if (NEXT_INSN (insn))
3094 compute_insns_for_mem (NEXT_INSN (insn),
3095 p ? NEXT_INSN (p) : NULL_RTX,
3096 ht);
3097 }
3098 else
3099 {
3100 rtx p = PREV_INSN (insn);
3101
3102 start_sequence ();
3103 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3104 GET_MODE (x), GET_MODE (x),
3105 GET_MODE_SIZE (GET_MODE (sub)),
3106 GET_MODE_SIZE (GET_MODE (sub)));
3107
3108 if (! validate_change (insn, loc, val, 0))
3109 {
3110 /* Discard the current sequence and put the
3111 ADDRESSOF on the stack. */
3112 end_sequence ();
3113 goto give_up;
3114 }
3115
3116 seq = gen_sequence ();
3117 end_sequence ();
3118 emit_insn_before (seq, insn);
3119 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3120 insn, ht);
3121 }
3122
3123 /* Remember the replacement so that the same one can be done
3124 on the REG_NOTES. */
3125 purge_bitfield_addressof_replacements
3126 = gen_rtx_EXPR_LIST (VOIDmode, x,
3127 gen_rtx_EXPR_LIST
3128 (VOIDmode, val,
3129 purge_bitfield_addressof_replacements));
3130
3131 /* We replaced with a reg -- all done. */
3132 return true;
3133 }
3134 }
3135
3136 else if (validate_change (insn, loc, sub, 0))
3137 {
3138 /* Remember the replacement so that the same one can be done
3139 on the REG_NOTES. */
3140 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3141 {
3142 rtx tem;
3143
3144 for (tem = purge_addressof_replacements;
3145 tem != NULL_RTX;
3146 tem = XEXP (XEXP (tem, 1), 1))
3147 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3148 {
3149 XEXP (XEXP (tem, 1), 0) = sub;
3150 return true;
3151 }
3152 purge_addressof_replacements
3153 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3154 gen_rtx_EXPR_LIST (VOIDmode, sub,
3155 purge_addressof_replacements));
3156 return true;
3157 }
3158 goto restart;
3159 }
3160 give_up:;
3161 /* else give up and put it into the stack */
3162 }
3163
3164 else if (code == ADDRESSOF)
3165 {
3166 put_addressof_into_stack (x, ht);
3167 return true;
3168 }
3169 else if (code == SET)
3170 {
3171 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3172 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3173 return result;
3174 }
3175
3176 /* Scan all subexpressions. */
3177 fmt = GET_RTX_FORMAT (code);
3178 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3179 {
3180 if (*fmt == 'e')
3181 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3182 else if (*fmt == 'E')
3183 for (j = 0; j < XVECLEN (x, i); j++)
3184 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3185 }
3186
3187 return result;
3188 }
3189
3190 /* Return HE, or allocate a new hash table entry in HT if HE is null. */
3191
3192 static struct hash_entry *
3193 insns_for_mem_newfunc (he, ht, k)
3194 struct hash_entry *he;
3195 struct hash_table *ht;
3196 hash_table_key k ATTRIBUTE_UNUSED;
3197 {
3198 struct insns_for_mem_entry *ifmhe;
3199 if (he)
3200 return he;
3201
3202 ifmhe = ((struct insns_for_mem_entry *)
3203 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3204 ifmhe->insns = NULL_RTX;
3205
3206 return &ifmhe->he;
3207 }
3208
3209 /* Return a hash value for K, a REG. */
3210
3211 static unsigned long
3212 insns_for_mem_hash (k)
3213 hash_table_key k;
3214 {
3215 /* K is really an RTX. Just use the address as the hash value. */
3216 return (unsigned long) k;
3217 }
3218
3219 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3220
3221 static boolean
3222 insns_for_mem_comp (k1, k2)
3223 hash_table_key k1;
3224 hash_table_key k2;
3225 {
3226 return k1 == k2;
3227 }
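/* Because each ADDRESSOF'd register is a distinct rtx object, its address
   is an adequate hash key and pointer identity is the right equality
   test, which is all the two functions above implement.  A standalone
   sketch of turning such a key into a bucket index; the power-of-two
   table size, the shift that discards alignment bits, and the assumption
   that a pointer fits in an unsigned long are choices of this sketch,
   not of hash.c:  */

static unsigned long
pointer_bucket (const void *key, unsigned long table_size)
{
  /* TABLE_SIZE must be a power of two here.  Dropping the low bits first
     avoids clustering, since heap pointers are usually aligned.  */
  return (((unsigned long) key) >> 3) & (table_size - 1);
}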
3228
3229 struct insns_for_mem_walk_info {
3230 /* The hash table that we are using to record which INSNs use which
3231 MEMs. */
3232 struct hash_table *ht;
3233
3234 /* The INSN we are currently processing. */
3235 rtx insn;
3236
3237 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3238 to find the insns that use the REGs in the ADDRESSOFs. */
3239 int pass;
3240 };
3241
3242 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3243 that might be used in an ADDRESSOF expression, record this INSN in
3244 the hash table given by DATA (which is really a pointer to an
3245 insns_for_mem_walk_info structure). */
3246
3247 static int
3248 insns_for_mem_walk (r, data)
3249 rtx *r;
3250 void *data;
3251 {
3252 struct insns_for_mem_walk_info *ifmwi
3253 = (struct insns_for_mem_walk_info *) data;
3254
3255 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3256 && GET_CODE (XEXP (*r, 0)) == REG)
3257 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3258 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3259 {
3260 /* Look up this REG in the hash table; only REGs recorded during pass 0 (those appearing under an ADDRESSOF) have an entry. */
3261 struct insns_for_mem_entry *ifme
3262 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3263 *r,
3264 /*create=*/0,
3265 /*copy=*/0);
3266
3267 /* If we have not already recorded this INSN, do so now. Since
3268 we process the INSNs in order, we know that if we have
3269 recorded it, it must be at the front of the list. */
3270 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3271 {
3272 /* We do the allocation on the same obstack as is used for
3273 the hash table since this memory will not be used once
3274 the hash table is deallocated. */
3275 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3276 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3277 ifme->insns);
3278 pop_obstacks ();
3279 }
3280 }
3281
3282 return 0;
3283 }
3284
3285 /* Walk the INSNS until we reach LAST_INSN, recording which INSNs use
3286 which REGs in HT. */
3287
3288 static void
3289 compute_insns_for_mem (insns, last_insn, ht)
3290 rtx insns;
3291 rtx last_insn;
3292 struct hash_table *ht;
3293 {
3294 rtx insn;
3295 struct insns_for_mem_walk_info ifmwi;
3296 ifmwi.ht = ht;
3297
3298 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3299 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3300 if (INSN_P (insn))
3301 {
3302 ifmwi.insn = insn;
3303 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3304 }
3305 }
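/* The walk above is made in two passes: pass 0 creates a table entry for
   every register that appears under an ADDRESSOF, and pass 1 records
   insns only for registers that already have an entry, so registers whose
   address is never taken cost nothing.  A standalone sketch of the same
   two-pass structure over small integer "registers"; the fixed table size
   and the single recorded user are simplifications of this sketch only:  */

#define SKETCH_N_REGS 64

struct reg_user { int interesting; int last_user; };

static void
index_reg_users (struct reg_user table[SKETCH_N_REGS],
		 const int *addressof_regs, int n_addressof,
		 const int *use_reg, const int *use_insn, int n_uses)
{
  int i;

  for (i = 0; i < SKETCH_N_REGS; i++)
    {
      table[i].interesting = 0;
      table[i].last_user = -1;
    }

  for (i = 0; i < n_addressof; i++)	/* pass 0: create the entries */
    table[addressof_regs[i]].interesting = 1;

  for (i = 0; i < n_uses; i++)		/* pass 1: record the users */
    if (table[use_reg[i]].interesting)
      table[use_reg[i]].last_user = use_insn[i];
}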
3306
3307 /* Helper function for purge_addressof called through for_each_rtx.
3308 Returns true iff the rtl is an ADDRESSOF. */
3309 static int
3310 is_addressof (rtl, data)
3311 rtx * rtl;
3312 void * data ATTRIBUTE_UNUSED;
3313 {
3314 return GET_CODE (* rtl) == ADDRESSOF;
3315 }
3316
3317 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3318 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3319 stack. */
3320
3321 void
3322 purge_addressof (insns)
3323 rtx insns;
3324 {
3325 rtx insn;
3326 struct hash_table ht;
3327
3328 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3329 requires a fixup pass over the instruction stream to correct
3330 INSNs that depended on the REG being a REG, and not a MEM. But,
3331 these fixup passes are slow. Furthermore, most MEMs are not
3332 mentioned in very many instructions. So, we speed up the process
3333 by pre-calculating which REGs occur in which INSNs; that allows
3334 us to perform the fixup passes much more quickly. */
3335 hash_table_init (&ht,
3336 insns_for_mem_newfunc,
3337 insns_for_mem_hash,
3338 insns_for_mem_comp);
3339 compute_insns_for_mem (insns, NULL_RTX, &ht);
3340
3341 for (insn = insns; insn; insn = NEXT_INSN (insn))
3342 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3343 || GET_CODE (insn) == CALL_INSN)
3344 {
3345 if (! purge_addressof_1 (&PATTERN (insn), insn,
3346 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3347 /* If we could not replace the ADDRESSOFs in the insn,
3348 something is wrong. */
3349 abort ();
3350
3351 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3352 {
3353 /* If we could not replace the ADDRESSOFs in the insn's notes,
3354 we can just remove the offending notes instead. */
3355 rtx note;
3356
3357 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3358 {
3359 /* If we find a REG_RETVAL note then the insn is a libcall.
3360 Such insns must have REG_EQUAL notes as well, in order
3361 for later passes of the compiler to work. So it is not
3362 safe to delete the notes here, and instead we abort. */
3363 if (REG_NOTE_KIND (note) == REG_RETVAL)
3364 abort ();
3365 if (for_each_rtx (& note, is_addressof, NULL))
3366 remove_note (insn, note);
3367 }
3368 }
3369 }
3370
3371 /* Clean up. */
3372 hash_table_free (&ht);
3373 purge_bitfield_addressof_replacements = 0;
3374 purge_addressof_replacements = 0;
3375
3376 /* REGs are shared. purge_addressof will destructively replace a REG
3377 with a MEM, which creates shared MEMs.
3378
3379 Unfortunately, the children of put_reg_into_stack assume that MEMs
3380 referring to the same stack slot are shared (fixup_var_refs and
3381 the associated hash table code).
3382
3383 So, we have to do another unsharing pass after we have flushed any
3384 REGs that had their address taken into the stack.
3385
3386 It may be worth tracking whether or not we converted any REGs into
3387 MEMs to avoid this overhead when it is not needed. */
3388 unshare_all_rtl_again (get_insns ());
3389 }
3390 \f
3391 /* Pass through the INSNS of function FNDECL and convert virtual register
3392 references to hard register references. */
3393
3394 void
3395 instantiate_virtual_regs (fndecl, insns)
3396 tree fndecl;
3397 rtx insns;
3398 {
3399 rtx insn;
3400 unsigned int i;
3401
3402 /* Compute the offsets to use for this function. */
3403 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3404 var_offset = STARTING_FRAME_OFFSET;
3405 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3406 out_arg_offset = STACK_POINTER_OFFSET;
3407 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3408
3409 /* Scan all variables and parameters of this function. For each that is
3410 in memory, instantiate all virtual registers if the result is a valid
3411 address. If not, we do it later. That will handle most uses of virtual
3412 regs on many machines. */
3413 instantiate_decls (fndecl, 1);
3414
3415 /* Initialize recognition, indicating that volatile is OK. */
3416 init_recog ();
3417
3418 /* Scan through all the insns, instantiating every virtual register still
3419 present. */
3420 for (insn = insns; insn; insn = NEXT_INSN (insn))
3421 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3422 || GET_CODE (insn) == CALL_INSN)
3423 {
3424 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3425 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3426 }
3427
3428 /* Instantiate the stack slots for the parm registers, for later use in
3429 addressof elimination. */
3430 for (i = 0; i < max_parm_reg; ++i)
3431 if (parm_reg_stack_loc[i])
3432 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3433
3434 /* Now instantiate the remaining register equivalences for debugging info.
3435 These will not be valid addresses. */
3436 instantiate_decls (fndecl, 0);
3437
3438 /* Indicate that, from now on, assign_stack_local should use
3439 frame_pointer_rtx. */
3440 virtuals_instantiated = 1;
3441 }
3442
3443 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3444 all virtual registers in their DECL_RTL's.
3445
3446 If VALID_ONLY, do this only if the resulting address is still valid.
3447 Otherwise, always do it. */
3448
3449 static void
3450 instantiate_decls (fndecl, valid_only)
3451 tree fndecl;
3452 int valid_only;
3453 {
3454 tree decl;
3455
3456 if (DECL_SAVED_INSNS (fndecl))
3457 /* When compiling an inline function, the obstack used for
3458 rtl allocation is the maybepermanent_obstack. Calling
3459 `resume_temporary_allocation' switches us back to that
3460 obstack while we process this function's parameters. */
3461 resume_temporary_allocation ();
3462
3463 /* Process all parameters of the function. */
3464 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3465 {
3466 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3467
3468 instantiate_decl (DECL_RTL (decl), size, valid_only);
3469
3470 /* If the parameter was promoted, then the incoming RTL mode may be
3471 larger than the declared type size. We must use the larger of
3472 the two sizes. */
3473 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3474 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3475 }
3476
3477 /* Now process all variables defined in the function or its subblocks. */
3478 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3479
3480 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3481 {
3482 /* Save all rtl allocated for this function by raising the
3483 high-water mark on the maybepermanent_obstack. */
3484 preserve_data ();
3485 /* All further rtl allocation is now done in the current_obstack. */
3486 rtl_in_current_obstack ();
3487 }
3488 }
3489
3490 /* Subroutine of instantiate_decls: Process all decls in the given
3491 BLOCK node and all its subblocks. */
3492
3493 static void
3494 instantiate_decls_1 (let, valid_only)
3495 tree let;
3496 int valid_only;
3497 {
3498 tree t;
3499
3500 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3501 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3502 valid_only);
3503
3504 /* Process all subblocks. */
3505 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3506 instantiate_decls_1 (t, valid_only);
3507 }
3508
3509 /* Subroutine of the preceding procedures: Given RTL representing a
3510 decl and the size of the object, do any instantiation required.
3511
3512 If VALID_ONLY is non-zero, it means that the RTL should only be
3513 changed if the new address is valid. */
3514
3515 static void
3516 instantiate_decl (x, size, valid_only)
3517 rtx x;
3518 HOST_WIDE_INT size;
3519 int valid_only;
3520 {
3521 enum machine_mode mode;
3522 rtx addr;
3523
3524 /* If this is not a MEM, no need to do anything. Similarly if the
3525 address is a constant or a register that is not a virtual register. */
3526
3527 if (x == 0 || GET_CODE (x) != MEM)
3528 return;
3529
3530 addr = XEXP (x, 0);
3531 if (CONSTANT_P (addr)
3532 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3533 || (GET_CODE (addr) == REG
3534 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3535 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3536 return;
3537
3538 /* If we should only do this if the address is valid, copy the address.
3539 We need to do this so we can undo any changes that might make the
3540 address invalid. This copy is unfortunate, but probably can't be
3541 avoided. */
3542
3543 if (valid_only)
3544 addr = copy_rtx (addr);
3545
3546 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3547
3548 if (valid_only && size >= 0)
3549 {
3550 unsigned HOST_WIDE_INT decl_size = size;
3551
3552 /* Now verify that the resulting address is valid for every integer or
3553 floating-point mode up to and including SIZE bytes long. We do this
3554 since the object might be accessed in any mode and frame addresses
3555 are shared. */
3556
3557 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3558 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3559 mode = GET_MODE_WIDER_MODE (mode))
3560 if (! memory_address_p (mode, addr))
3561 return;
3562
3563 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3564 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3565 mode = GET_MODE_WIDER_MODE (mode))
3566 if (! memory_address_p (mode, addr))
3567 return;
3568 }
3569
3570 /* Put back the address now that we have updated it and we either know
3571 it is valid or we don't care whether it is valid. */
3572
3573 XEXP (x, 0) = addr;
3574 }
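/* Editorial example (illustrative, not in the original source): if SIZE is
   8, the loops above require the rewritten address to be valid for QImode,
   HImode, SImode and DImode, and for the float modes of at most 8 bytes,
   before committing to it; if any of those checks fails, the original
   address is left untouched.  The exact set of modes depends on the
   target.  */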
3575 \f
3576 /* Given a pointer to a piece of rtx and an optional pointer to the
3577 containing object, instantiate any virtual registers present in it.
3578
3579 If EXTRA_INSNS, we always do the replacement and generate
3580 any extra insns before OBJECT. If it is zero, we do nothing if the
3581 replacement is not valid.
3582
3583 Return 1 if we either had nothing to do or if we were able to do the
3584 needed replacement. Return 0 otherwise; we only return zero if
3585 EXTRA_INSNS is zero.
3586
3587 We first try some simple transformations to avoid the creation of extra
3588 pseudos. */
3589
3590 static int
3591 instantiate_virtual_regs_1 (loc, object, extra_insns)
3592 rtx *loc;
3593 rtx object;
3594 int extra_insns;
3595 {
3596 rtx x;
3597 RTX_CODE code;
3598 rtx new = 0;
3599 HOST_WIDE_INT offset = 0;
3600 rtx temp;
3601 rtx seq;
3602 int i, j;
3603 const char *fmt;
3604
3605 /* Re-start here to avoid recursion in common cases. */
3606 restart:
3607
3608 x = *loc;
3609 if (x == 0)
3610 return 1;
3611
3612 code = GET_CODE (x);
3613
3614 /* Check for some special cases. */
3615 switch (code)
3616 {
3617 case CONST_INT:
3618 case CONST_DOUBLE:
3619 case CONST:
3620 case SYMBOL_REF:
3621 case CODE_LABEL:
3622 case PC:
3623 case CC0:
3624 case ASM_INPUT:
3625 case ADDR_VEC:
3626 case ADDR_DIFF_VEC:
3627 case RETURN:
3628 return 1;
3629
3630 case SET:
3631 /* We are allowed to set the virtual registers. This means that
3632 the actual register should receive the source minus the
3633 appropriate offset. This is used, for example, in the handling
3634 of non-local gotos. */
3635 if (SET_DEST (x) == virtual_incoming_args_rtx)
3636 new = arg_pointer_rtx, offset = - in_arg_offset;
3637 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3638 new = frame_pointer_rtx, offset = - var_offset;
3639 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3640 new = stack_pointer_rtx, offset = - dynamic_offset;
3641 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3642 new = stack_pointer_rtx, offset = - out_arg_offset;
3643 else if (SET_DEST (x) == virtual_cfa_rtx)
3644 new = arg_pointer_rtx, offset = - cfa_offset;
3645
3646 if (new)
3647 {
3648 rtx src = SET_SRC (x);
3649
3650 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3651
3652 /* The only valid sources here are PLUS or REG. Just do
3653 the simplest possible thing to handle them. */
3654 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3655 abort ();
3656
3657 start_sequence ();
3658 if (GET_CODE (src) != REG)
3659 temp = force_operand (src, NULL_RTX);
3660 else
3661 temp = src;
3662 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3663 seq = get_insns ();
3664 end_sequence ();
3665
3666 emit_insns_before (seq, object);
3667 SET_DEST (x) = new;
3668
3669 if (! validate_change (object, &SET_SRC (x), temp, 0)
3670 || ! extra_insns)
3671 abort ();
3672
3673 return 1;
3674 }
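/* Editorial illustration (not in the original source): a store such as
     (set (reg virtual-stack-vars) (reg 100))
   is rewritten by the code above into
     (set (reg frame-pointer) (plus (reg 100) (const_int -var_offset)))
   with the PLUS forced into separate insns emitted before OBJECT when it
   is not directly valid.  Register numbers and offsets are illustrative.  */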
3675
3676 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3677 loc = &SET_SRC (x);
3678 goto restart;
3679
3680 case PLUS:
3681 /* Handle special case of virtual register plus constant. */
3682 if (CONSTANT_P (XEXP (x, 1)))
3683 {
3684 rtx old, new_offset;
3685
3686 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3687 if (GET_CODE (XEXP (x, 0)) == PLUS)
3688 {
3689 rtx inner = XEXP (XEXP (x, 0), 0);
3690
3691 if (inner == virtual_incoming_args_rtx)
3692 new = arg_pointer_rtx, offset = in_arg_offset;
3693 else if (inner == virtual_stack_vars_rtx)
3694 new = frame_pointer_rtx, offset = var_offset;
3695 else if (inner == virtual_stack_dynamic_rtx)
3696 new = stack_pointer_rtx, offset = dynamic_offset;
3697 else if (inner == virtual_outgoing_args_rtx)
3698 new = stack_pointer_rtx, offset = out_arg_offset;
3699 else if (inner == virtual_cfa_rtx)
3700 new = arg_pointer_rtx, offset = cfa_offset;
3701 else
3702 {
3703 loc = &XEXP (x, 0);
3704 goto restart;
3705 }
3706
3707 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3708 extra_insns);
3709 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3710 }
3711
3712 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3713 new = arg_pointer_rtx, offset = in_arg_offset;
3714 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3715 new = frame_pointer_rtx, offset = var_offset;
3716 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3717 new = stack_pointer_rtx, offset = dynamic_offset;
3718 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3719 new = stack_pointer_rtx, offset = out_arg_offset;
3720 else if (XEXP (x, 0) == virtual_cfa_rtx)
3721 new = arg_pointer_rtx, offset = cfa_offset;
3722 else
3723 {
3724 /* We know the second operand is a constant. Unless the
3725 first operand is a REG (which has already been checked),
3726 it needs to be checked. */
3727 if (GET_CODE (XEXP (x, 0)) != REG)
3728 {
3729 loc = &XEXP (x, 0);
3730 goto restart;
3731 }
3732 return 1;
3733 }
3734
3735 new_offset = plus_constant (XEXP (x, 1), offset);
3736
3737 /* If the new constant is zero, try to replace the sum with just
3738 the register. */
3739 if (new_offset == const0_rtx
3740 && validate_change (object, loc, new, 0))
3741 return 1;
3742
3743 /* Next try to replace the register and new offset.
3744 There are two changes to validate here and we can't assume that,
3745 when the old offset equals the new one, just changing the register
3746 will yield a valid insn. In the interests of a little efficiency,
3747 however, we only call validate change once (we don't queue up the
3748 changes and then call apply_change_group). */
3749
3750 old = XEXP (x, 0);
3751 if (offset == 0
3752 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3753 : (XEXP (x, 0) = new,
3754 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3755 {
3756 if (! extra_insns)
3757 {
3758 XEXP (x, 0) = old;
3759 return 0;
3760 }
3761
3762 /* Otherwise copy the new constant into a register and replace
3763 the constant with that register. */
3764 temp = gen_reg_rtx (Pmode);
3765 XEXP (x, 0) = new;
3766 if (validate_change (object, &XEXP (x, 1), temp, 0))
3767 emit_insn_before (gen_move_insn (temp, new_offset), object);
3768 else
3769 {
3770 /* If that didn't work, replace this expression with a
3771 register containing the sum. */
3772
3773 XEXP (x, 0) = old;
3774 new = gen_rtx_PLUS (Pmode, new, new_offset);
3775
3776 start_sequence ();
3777 temp = force_operand (new, NULL_RTX);
3778 seq = get_insns ();
3779 end_sequence ();
3780
3781 emit_insns_before (seq, object);
3782 if (! validate_change (object, loc, temp, 0)
3783 && ! validate_replace_rtx (x, temp, object))
3784 abort ();
3785 }
3786 }
3787
3788 return 1;
3789 }
3790
3791 /* Fall through to generic two-operand expression case. */
3792 case EXPR_LIST:
3793 case CALL:
3794 case COMPARE:
3795 case MINUS:
3796 case MULT:
3797 case DIV: case UDIV:
3798 case MOD: case UMOD:
3799 case AND: case IOR: case XOR:
3800 case ROTATERT: case ROTATE:
3801 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3802 case NE: case EQ:
3803 case GE: case GT: case GEU: case GTU:
3804 case LE: case LT: case LEU: case LTU:
3805 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3806 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3807 loc = &XEXP (x, 0);
3808 goto restart;
3809
3810 case MEM:
3811 /* Most cases of MEM that convert to valid addresses have already been
3812 handled by our scan of decls. The only special handling we
3813 need here is to make a copy of the rtx to ensure it isn't being
3814 shared if we have to change it to a pseudo.
3815
3816 If the rtx is a simple reference to an address via a virtual register,
3817 it can potentially be shared. In such cases, first try to make it
3818 a valid address, which can also be shared. Otherwise, copy it and
3819 proceed normally.
3820
3821 First check for common cases that need no processing. These are
3822 usually due to instantiation already being done on a previous instance
3823 of a shared rtx. */
3824
3825 temp = XEXP (x, 0);
3826 if (CONSTANT_ADDRESS_P (temp)
3827 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3828 || temp == arg_pointer_rtx
3829 #endif
3830 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3831 || temp == hard_frame_pointer_rtx
3832 #endif
3833 || temp == frame_pointer_rtx)
3834 return 1;
3835
3836 if (GET_CODE (temp) == PLUS
3837 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3838 && (XEXP (temp, 0) == frame_pointer_rtx
3839 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3840 || XEXP (temp, 0) == hard_frame_pointer_rtx
3841 #endif
3842 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3843 || XEXP (temp, 0) == arg_pointer_rtx
3844 #endif
3845 ))
3846 return 1;
3847
3848 if (temp == virtual_stack_vars_rtx
3849 || temp == virtual_incoming_args_rtx
3850 || (GET_CODE (temp) == PLUS
3851 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3852 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3853 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3854 {
3855 /* This MEM may be shared. If the substitution can be done without
3856 the need to generate new pseudos, we want to do it in place
3857 so all copies of the shared rtx benefit. The call below will
3858 only make substitutions if the resulting address is still
3859 valid.
3860
3861 Note that we cannot pass X as the object in the recursive call
3862 since the insn being processed may not allow all valid
3863 addresses. However, if we were not passed an object, we can
3864 only modify X without copying it if X will have a valid
3865 address.
3866
3867 ??? Also note that this can still lose if OBJECT is an insn that
3868 has fewer restrictions on an address than some other insn.
3869 In that case, we will modify the shared address. This case
3870 doesn't seem very likely, though. One case where this could
3871 happen is in the case of a USE or CLOBBER reference, but we
3872 take care of that below. */
3873
3874 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3875 object ? object : x, 0))
3876 return 1;
3877
3878 /* Otherwise make a copy and process that copy. We copy the entire
3879 RTL expression since it might be a PLUS which could also be
3880 shared. */
3881 *loc = x = copy_rtx (x);
3882 }
3883
3884 /* Fall through to generic unary operation case. */
3885 case SUBREG:
3886 case STRICT_LOW_PART:
3887 case NEG: case NOT:
3888 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3889 case SIGN_EXTEND: case ZERO_EXTEND:
3890 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3891 case FLOAT: case FIX:
3892 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3893 case ABS:
3894 case SQRT:
3895 case FFS:
3896 /* These cases either have just one operand or we know that we need not
3897 check the rest of the operands. */
3898 loc = &XEXP (x, 0);
3899 goto restart;
3900
3901 case USE:
3902 case CLOBBER:
3903 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3904 go ahead and make the invalid change anyway, but apply it to a copy. For a REG,
3905 just make the recursive call, since there's no chance of a problem. */
3906
3907 if ((GET_CODE (XEXP (x, 0)) == MEM
3908 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3909 0))
3910 || (GET_CODE (XEXP (x, 0)) == REG
3911 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3912 return 1;
3913
3914 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3915 loc = &XEXP (x, 0);
3916 goto restart;
3917
3918 case REG:
3919 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3920 in front of this insn and substitute the temporary. */
3921 if (x == virtual_incoming_args_rtx)
3922 new = arg_pointer_rtx, offset = in_arg_offset;
3923 else if (x == virtual_stack_vars_rtx)
3924 new = frame_pointer_rtx, offset = var_offset;
3925 else if (x == virtual_stack_dynamic_rtx)
3926 new = stack_pointer_rtx, offset = dynamic_offset;
3927 else if (x == virtual_outgoing_args_rtx)
3928 new = stack_pointer_rtx, offset = out_arg_offset;
3929 else if (x == virtual_cfa_rtx)
3930 new = arg_pointer_rtx, offset = cfa_offset;
3931
3932 if (new)
3933 {
3934 temp = plus_constant (new, offset);
3935 if (!validate_change (object, loc, temp, 0))
3936 {
3937 if (! extra_insns)
3938 return 0;
3939
3940 start_sequence ();
3941 temp = force_operand (temp, NULL_RTX);
3942 seq = get_insns ();
3943 end_sequence ();
3944
3945 emit_insns_before (seq, object);
3946 if (! validate_change (object, loc, temp, 0)
3947 && ! validate_replace_rtx (x, temp, object))
3948 abort ();
3949 }
3950 }
3951
3952 return 1;
3953
3954 case ADDRESSOF:
3955 if (GET_CODE (XEXP (x, 0)) == REG)
3956 return 1;
3957
3958 else if (GET_CODE (XEXP (x, 0)) == MEM)
3959 {
3960 /* If we have a (addressof (mem ..)), do any instantiation inside
3961 since we know we'll be making the inside valid when we finally
3962 remove the ADDRESSOF. */
3963 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3964 return 1;
3965 }
3966 break;
3967
3968 default:
3969 break;
3970 }
3971
3972 /* Scan all subexpressions. */
3973 fmt = GET_RTX_FORMAT (code);
3974 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3975 if (*fmt == 'e')
3976 {
3977 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3978 return 0;
3979 }
3980 else if (*fmt == 'E')
3981 for (j = 0; j < XVECLEN (x, i); j++)
3982 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3983 extra_insns))
3984 return 0;
3985
3986 return 1;
3987 }
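/* Usage sketch (editorial): a typical call rewrites an insn pattern in
   place, as in the scan above:

     instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);

   With EXTRA_INSNS nonzero the replacement is always made, emitting any
   helper insns before OBJECT; with EXTRA_INSNS zero the call returns 0
   rather than commit a replacement the insn would not accept.  */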
3988 \f
3989 /* Optimization: assuming this function does not receive nonlocal gotos,
3990 delete the handlers for such, as well as the insns to establish
3991 and disestablish them. */
3992
3993 static void
3994 delete_handlers ()
3995 {
3996 rtx insn;
3997 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3998 {
3999 /* Delete the handler by turning off the flag that would
4000 prevent jump_optimize from deleting it.
4001 Also permit deletion of the nonlocal labels themselves
4002 if nothing local refers to them. */
4003 if (GET_CODE (insn) == CODE_LABEL)
4004 {
4005 tree t, last_t;
4006
4007 LABEL_PRESERVE_P (insn) = 0;
4008
4009 /* Remove it from the nonlocal_label list, to avoid confusing
4010 flow. */
4011 for (t = nonlocal_labels, last_t = 0; t;
4012 last_t = t, t = TREE_CHAIN (t))
4013 if (DECL_RTL (TREE_VALUE (t)) == insn)
4014 break;
4015 if (t)
4016 {
4017 if (! last_t)
4018 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4019 else
4020 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4021 }
4022 }
4023 if (GET_CODE (insn) == INSN)
4024 {
4025 int can_delete = 0;
4026 rtx t;
4027 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4028 if (reg_mentioned_p (t, PATTERN (insn)))
4029 {
4030 can_delete = 1;
4031 break;
4032 }
4033 if (can_delete
4034 || (nonlocal_goto_stack_level != 0
4035 && reg_mentioned_p (nonlocal_goto_stack_level,
4036 PATTERN (insn))))
4037 delete_insn (insn);
4038 }
4039 }
4040 }
4041 \f
4042 int
4043 max_parm_reg_num ()
4044 {
4045 return max_parm_reg;
4046 }
4047
4048 /* Return the first insn following those generated by `assign_parms'. */
4049
4050 rtx
4051 get_first_nonparm_insn ()
4052 {
4053 if (last_parm_insn)
4054 return NEXT_INSN (last_parm_insn);
4055 return get_insns ();
4056 }
4057
4058 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4059 Crash if there is none. */
4060
4061 rtx
4062 get_first_block_beg ()
4063 {
4064 register rtx searcher;
4065 register rtx insn = get_first_nonparm_insn ();
4066
4067 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4068 if (GET_CODE (searcher) == NOTE
4069 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4070 return searcher;
4071
4072 abort (); /* Invalid call to this function. (See comments above.) */
4073 return NULL_RTX;
4074 }
4075
4076 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4077 This means a type for which function calls must pass an address to the
4078 function or get an address back from the function.
4079 EXP may be a type node or an expression (whose type is tested). */
4080
4081 int
4082 aggregate_value_p (exp)
4083 tree exp;
4084 {
4085 int i, regno, nregs;
4086 rtx reg;
4087
4088 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4089
4090 if (TREE_CODE (type) == VOID_TYPE)
4091 return 0;
4092 if (RETURN_IN_MEMORY (type))
4093 return 1;
4094 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4095 and thus can't be returned in registers. */
4096 if (TREE_ADDRESSABLE (type))
4097 return 1;
4098 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4099 return 1;
4100 /* Make sure we have suitable call-clobbered regs to return
4101 the value in; if not, we must return it in memory. */
4102 reg = hard_function_value (type, 0, 0);
4103
4104 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4105 it is OK. */
4106 if (GET_CODE (reg) != REG)
4107 return 0;
4108
4109 regno = REGNO (reg);
4110 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4111 for (i = 0; i < nregs; i++)
4112 if (! call_used_regs[regno + i])
4113 return 1;
4114 return 0;
4115 }
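/* Editorial example: with -fpcc-struct-return every aggregate answers 1
   here; otherwise a small struct such as "struct { int x; }" typically
   answers 0 when the target returns it in a call-clobbered register, while
   any TREE_ADDRESSABLE type answers 1.  Illustrative only; the result is
   driven by RETURN_IN_MEMORY and the target's register conventions.  */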
4116 \f
4117 /* Assign RTL expressions to the function's parameters.
4118 This may involve copying them into registers and using
4119 those registers as the RTL for them. */
4120
4121 void
4122 assign_parms (fndecl)
4123 tree fndecl;
4124 {
4125 register tree parm;
4126 register rtx entry_parm = 0;
4127 register rtx stack_parm = 0;
4128 CUMULATIVE_ARGS args_so_far;
4129 enum machine_mode promoted_mode, passed_mode;
4130 enum machine_mode nominal_mode, promoted_nominal_mode;
4131 int unsignedp;
4132 /* Total space needed so far for args on the stack,
4133 given as a constant and a tree-expression. */
4134 struct args_size stack_args_size;
4135 tree fntype = TREE_TYPE (fndecl);
4136 tree fnargs = DECL_ARGUMENTS (fndecl);
4137 /* This is used for the arg pointer when referring to stack args. */
4138 rtx internal_arg_pointer;
4139 /* This is a dummy PARM_DECL that we used for the function result if
4140 the function returns a structure. */
4141 tree function_result_decl = 0;
4142 #ifdef SETUP_INCOMING_VARARGS
4143 int varargs_setup = 0;
4144 #endif
4145 rtx conversion_insns = 0;
4146 struct args_size alignment_pad;
4147
4148 /* Nonzero if the last arg is named `__builtin_va_alist',
4149 which is used on some machines for old-fashioned non-ANSI varargs.h;
4150 this should be stuck onto the stack as if it had arrived there. */
4151 int hide_last_arg
4152 = (current_function_varargs
4153 && fnargs
4154 && (parm = tree_last (fnargs)) != 0
4155 && DECL_NAME (parm)
4156 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4157 "__builtin_va_alist")));
4158
4159 /* Nonzero if function takes extra anonymous args.
4160 This means the last named arg must be on the stack
4161 right before the anonymous ones. */
4162 int stdarg
4163 = (TYPE_ARG_TYPES (fntype) != 0
4164 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4165 != void_type_node));
4166
4167 current_function_stdarg = stdarg;
4168
4169 /* If the reg that the virtual arg pointer will be translated into is
4170 not a fixed reg or is the stack pointer, make a copy of the virtual
4171 arg pointer, and address parms via the copy. The frame pointer is
4172 considered fixed even though it is not marked as such.
4173
4174 The second time through, simply use ap to avoid generating rtx. */
4175
4176 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4177 || ! (fixed_regs[ARG_POINTER_REGNUM]
4178 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4179 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4180 else
4181 internal_arg_pointer = virtual_incoming_args_rtx;
4182 current_function_internal_arg_pointer = internal_arg_pointer;
4183
4184 stack_args_size.constant = 0;
4185 stack_args_size.var = 0;
4186
4187 /* If struct value address is treated as the first argument, make it so. */
4188 if (aggregate_value_p (DECL_RESULT (fndecl))
4189 && ! current_function_returns_pcc_struct
4190 && struct_value_incoming_rtx == 0)
4191 {
4192 tree type = build_pointer_type (TREE_TYPE (fntype));
4193
4194 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4195
4196 DECL_ARG_TYPE (function_result_decl) = type;
4197 TREE_CHAIN (function_result_decl) = fnargs;
4198 fnargs = function_result_decl;
4199 }
4200
4201 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4202 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4203
4204 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4205 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4206 #else
4207 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4208 #endif
4209
4210 /* We haven't yet found an argument that we must push and pretend the
4211 caller did. */
4212 current_function_pretend_args_size = 0;
4213
4214 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4215 {
4216 struct args_size stack_offset;
4217 struct args_size arg_size;
4218 int passed_pointer = 0;
4219 int did_conversion = 0;
4220 tree passed_type = DECL_ARG_TYPE (parm);
4221 tree nominal_type = TREE_TYPE (parm);
4222 int pretend_named;
4223
4224 /* Set LAST_NAMED if this is the last named arg before some
4225 anonymous args. */
4226 int last_named = ((TREE_CHAIN (parm) == 0
4227 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4228 && (stdarg || current_function_varargs));
4229 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4230 most machines, if this is a varargs/stdarg function, then we treat
4231 the last named arg as if it were anonymous too. */
4232 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4233
4234 if (TREE_TYPE (parm) == error_mark_node
4235 /* This can happen after weird syntax errors
4236 or if an enum type is defined among the parms. */
4237 || TREE_CODE (parm) != PARM_DECL
4238 || passed_type == NULL)
4239 {
4240 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4241 = gen_rtx_MEM (BLKmode, const0_rtx);
4242 TREE_USED (parm) = 1;
4243 continue;
4244 }
4245
4246 /* For a varargs.h function, save info about regs and stack space
4247 used by the individual args, not including the va_alist arg. */
4248 if (hide_last_arg && last_named)
4249 current_function_args_info = args_so_far;
4250
4251 /* Find mode of arg as it is passed, and mode of arg
4252 as it should be during execution of this function. */
4253 passed_mode = TYPE_MODE (passed_type);
4254 nominal_mode = TYPE_MODE (nominal_type);
4255
4256 /* If the parm's mode is VOID, its value doesn't matter,
4257 so avoid the usual things like emit_move_insn that could crash. */
4258 if (nominal_mode == VOIDmode)
4259 {
4260 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4261 continue;
4262 }
4263
4264 /* If the parm is to be passed as a transparent union, use the
4265 type of the first field for the tests below. We have already
4266 verified that the modes are the same. */
4267 if (DECL_TRANSPARENT_UNION (parm)
4268 || (TREE_CODE (passed_type) == UNION_TYPE
4269 && TYPE_TRANSPARENT_UNION (passed_type)))
4270 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4271
4272 /* See if this arg was passed by invisible reference. It is if
4273 it is an object whose size depends on the contents of the
4274 object itself or if the machine requires these objects be passed
4275 that way. */
4276
4277 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4278 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4279 || TREE_ADDRESSABLE (passed_type)
4280 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4281 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4282 passed_type, named_arg)
4283 #endif
4284 )
4285 {
4286 passed_type = nominal_type = build_pointer_type (passed_type);
4287 passed_pointer = 1;
4288 passed_mode = nominal_mode = Pmode;
4289 }
4290
4291 promoted_mode = passed_mode;
4292
4293 #ifdef PROMOTE_FUNCTION_ARGS
4294 /* Compute the mode to which the arg is actually extended. */
4295 unsignedp = TREE_UNSIGNED (passed_type);
4296 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4297 #endif
4298
4299 /* Let machine desc say which reg (if any) the parm arrives in.
4300 0 means it arrives on the stack. */
4301 #ifdef FUNCTION_INCOMING_ARG
4302 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4303 passed_type, named_arg);
4304 #else
4305 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4306 passed_type, named_arg);
4307 #endif
4308
4309 if (entry_parm == 0)
4310 promoted_mode = passed_mode;
4311
4312 #ifdef SETUP_INCOMING_VARARGS
4313 /* If this is the last named parameter, do any required setup for
4314 varargs or stdargs. We need to know about the case of this being an
4315 addressable type, in which case we skip the registers it
4316 would have arrived in.
4317
4318 For stdargs, LAST_NAMED will be set for two parameters, the one that
4319 is actually the last named, and the dummy parameter. We only
4320 want to do this action once.
4321
4322 Also, indicate when RTL generation is to be suppressed. */
4323 if (last_named && !varargs_setup)
4324 {
4325 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4326 current_function_pretend_args_size, 0);
4327 varargs_setup = 1;
4328 }
4329 #endif
4330
4331 /* Determine parm's home in the stack,
4332 in case it arrives in the stack or we should pretend it did.
4333
4334 Compute the stack position and rtx where the argument arrives
4335 and its size.
4336
4337 There is one complexity here: If this was a parameter that would
4338 have been passed in registers, but wasn't only because it is
4339 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4340 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4341 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4342 0 as it was the previous time. */
4343
4344 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4345 locate_and_pad_parm (promoted_mode, passed_type,
4346 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4347 1,
4348 #else
4349 #ifdef FUNCTION_INCOMING_ARG
4350 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4351 passed_type,
4352 pretend_named) != 0,
4353 #else
4354 FUNCTION_ARG (args_so_far, promoted_mode,
4355 passed_type,
4356 pretend_named) != 0,
4357 #endif
4358 #endif
4359 fndecl, &stack_args_size, &stack_offset, &arg_size,
4360 &alignment_pad);
4361
4362 {
4363 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4364
4365 if (offset_rtx == const0_rtx)
4366 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4367 else
4368 stack_parm = gen_rtx_MEM (promoted_mode,
4369 gen_rtx_PLUS (Pmode,
4370 internal_arg_pointer,
4371 offset_rtx));
4372
4373 set_mem_attributes (stack_parm, parm, 1);
4374 }
4375
4376 /* If this parameter was passed both in registers and in the stack,
4377 use the copy on the stack. */
4378 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4379 entry_parm = 0;
4380
4381 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4382 /* If this parm was passed part in regs and part in memory,
4383 pretend it arrived entirely in memory
4384 by pushing the register-part onto the stack.
4385
4386 In the special case of a DImode or DFmode that is split,
4387 we could put it together in a pseudoreg directly,
4388 but for now that's not worth bothering with. */
4389
4390 if (entry_parm)
4391 {
4392 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4393 passed_type, named_arg);
4394
4395 if (nregs > 0)
4396 {
4397 current_function_pretend_args_size
4398 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4399 / (PARM_BOUNDARY / BITS_PER_UNIT)
4400 * (PARM_BOUNDARY / BITS_PER_UNIT));
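/* Editorial worked example (illustrative values): with
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits, a parm whose first
   three words arrived in registers gives nregs * UNITS_PER_WORD == 12,
   which the expression above rounds up to 16 bytes of pretended
   argument space.  */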
4401
4402 /* Handle calls that pass values in multiple non-contiguous
4403 locations. The Irix 6 ABI has examples of this. */
4404 if (GET_CODE (entry_parm) == PARALLEL)
4405 emit_group_store (validize_mem (stack_parm), entry_parm,
4406 int_size_in_bytes (TREE_TYPE (parm)),
4407 TYPE_ALIGN (TREE_TYPE (parm)));
4408
4409 else
4410 move_block_from_reg (REGNO (entry_parm),
4411 validize_mem (stack_parm), nregs,
4412 int_size_in_bytes (TREE_TYPE (parm)));
4413
4414 entry_parm = stack_parm;
4415 }
4416 }
4417 #endif
4418
4419 /* If we didn't decide this parm came in a register,
4420 by default it came on the stack. */
4421 if (entry_parm == 0)
4422 entry_parm = stack_parm;
4423
4424 /* Record permanently how this parm was passed. */
4425 DECL_INCOMING_RTL (parm) = entry_parm;
4426
4427 /* If there is actually space on the stack for this parm,
4428 count it in stack_args_size; otherwise set stack_parm to 0
4429 to indicate there is no preallocated stack slot for the parm. */
4430
4431 if (entry_parm == stack_parm
4432 || (GET_CODE (entry_parm) == PARALLEL
4433 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4434 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4435 /* On some machines, even if a parm value arrives in a register
4436 there is still an (uninitialized) stack slot allocated for it.
4437
4438 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4439 whether this parameter already has a stack slot allocated,
4440 because an arg block exists only if current_function_args_size
4441 is larger than some threshold, and we haven't calculated that
4442 yet. So, for now, we just assume that stack slots never exist
4443 in this case. */
4444 || REG_PARM_STACK_SPACE (fndecl) > 0
4445 #endif
4446 )
4447 {
4448 stack_args_size.constant += arg_size.constant;
4449 if (arg_size.var)
4450 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4451 }
4452 else
4453 /* No stack slot was pushed for this parm. */
4454 stack_parm = 0;
4455
4456 /* Update info on where next arg arrives in registers. */
4457
4458 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4459 passed_type, named_arg);
4460
4461 /* If we can't trust the parm stack slot to be aligned enough
4462 for its ultimate type, don't use that slot after entry.
4463 We'll make another stack slot, if we need one. */
4464 {
4465 unsigned int thisparm_boundary
4466 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4467
4468 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4469 stack_parm = 0;
4470 }
4471
4472 /* If parm was passed in memory, and we need to convert it on entry,
4473 don't store it back in that same slot. */
4474 if (entry_parm != 0
4475 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4476 stack_parm = 0;
4477
4478 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4479 in the mode in which it arrives.
4480 STACK_PARM is an RTX for a stack slot where the parameter can live
4481 during the function (in case we want to put it there).
4482 STACK_PARM is 0 if no stack slot was pushed for it.
4483
4484 Now output code if necessary to convert ENTRY_PARM to
4485 the type in which this function declares it,
4486 and store that result in an appropriate place,
4487 which may be a pseudo reg, may be STACK_PARM,
4488 or may be a local stack slot if STACK_PARM is 0.
4489
4490 Set DECL_RTL to that place. */
4491
4492 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4493 {
4494 /* If a BLKmode arrives in registers, copy it to a stack slot.
4495 Handle calls that pass values in multiple non-contiguous
4496 locations. The Irix 6 ABI has examples of this. */
4497 if (GET_CODE (entry_parm) == REG
4498 || GET_CODE (entry_parm) == PARALLEL)
4499 {
4500 int size_stored
4501 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4502 UNITS_PER_WORD);
4503
4504 /* Note that we will be storing an integral number of words.
4505 So we have to be careful to ensure that we allocate an
4506 integral number of words. We do this below in the
4507 assign_stack_local if space was not allocated in the argument
4508 list. If it was, this will not work if PARM_BOUNDARY is not
4509 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4510 if it becomes a problem. */
4511
4512 if (stack_parm == 0)
4513 {
4514 stack_parm
4515 = assign_stack_local (GET_MODE (entry_parm),
4516 size_stored, 0);
4517 set_mem_attributes (stack_parm, parm, 1);
4518 }
4519
4520 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4521 abort ();
4522
4523 /* Handle calls that pass values in multiple non-contiguous
4524 locations. The Irix 6 ABI has examples of this. */
4525 if (GET_CODE (entry_parm) == PARALLEL)
4526 emit_group_store (validize_mem (stack_parm), entry_parm,
4527 int_size_in_bytes (TREE_TYPE (parm)),
4528 TYPE_ALIGN (TREE_TYPE (parm)));
4529 else
4530 move_block_from_reg (REGNO (entry_parm),
4531 validize_mem (stack_parm),
4532 size_stored / UNITS_PER_WORD,
4533 int_size_in_bytes (TREE_TYPE (parm)));
4534 }
4535 DECL_RTL (parm) = stack_parm;
4536 }
4537 else if (! ((! optimize
4538 && ! DECL_REGISTER (parm)
4539 && ! DECL_INLINE (fndecl))
4540 /* layout_decl may set this. */
4541 || TREE_ADDRESSABLE (parm)
4542 || TREE_SIDE_EFFECTS (parm)
4543 /* If -ffloat-store specified, don't put explicit
4544 float variables into registers. */
4545 || (flag_float_store
4546 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4547 /* Always assign pseudo to structure return or item passed
4548 by invisible reference. */
4549 || passed_pointer || parm == function_result_decl)
4550 {
4551 /* Store the parm in a pseudoregister during the function, but we
4552 may need to do it in a wider mode. */
4553
4554 register rtx parmreg;
4555 unsigned int regno, regnoi = 0, regnor = 0;
4556
4557 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4558
4559 promoted_nominal_mode
4560 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4561
4562 parmreg = gen_reg_rtx (promoted_nominal_mode);
4563 mark_user_reg (parmreg);
4564
4565 /* If this was an item that we received a pointer to, set DECL_RTL
4566 appropriately. */
4567 if (passed_pointer)
4568 {
4569 DECL_RTL (parm)
4570 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4571 set_mem_attributes (DECL_RTL (parm), parm, 1);
4572 }
4573 else
4574 DECL_RTL (parm) = parmreg;
4575
4576 /* Copy the value into the register. */
4577 if (nominal_mode != passed_mode
4578 || promoted_nominal_mode != promoted_mode)
4579 {
4580 int save_tree_used;
4581 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4582 mode, by the caller. We now have to convert it to
4583 NOMINAL_MODE, if different. However, PARMREG may be in
4584 a different mode than NOMINAL_MODE if it is being stored
4585 promoted.
4586
4587 If ENTRY_PARM is a hard register, it might be in a register
4588 not valid for operating in its mode (e.g., an odd-numbered
4589 register for a DFmode). In that case, moves are the only
4590 thing valid, so we can't do a convert from there. This
4591 occurs when the calling sequence allow such misaligned
4592 usages.
4593
4594 In addition, the conversion may involve a call, which could
4595 clobber parameters which haven't been copied to pseudo
4596 registers yet. Therefore, we must first copy the parm to
4597 a pseudo reg here, and save the conversion until after all
4598 parameters have been moved. */
4599
4600 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4601
4602 emit_move_insn (tempreg, validize_mem (entry_parm));
4603
4604 push_to_sequence (conversion_insns);
4605 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4606
4607 /* TREE_USED gets set erroneously during expand_assignment. */
4608 save_tree_used = TREE_USED (parm);
4609 expand_assignment (parm,
4610 make_tree (nominal_type, tempreg), 0, 0);
4611 TREE_USED (parm) = save_tree_used;
4612 conversion_insns = get_insns ();
4613 did_conversion = 1;
4614 end_sequence ();
4615 }
4616 else
4617 emit_move_insn (parmreg, validize_mem (entry_parm));
4618
4619 /* If we were passed a pointer but the actual value
4620 can safely live in a register, put it in one. */
4621 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4622 && ! ((! optimize
4623 && ! DECL_REGISTER (parm)
4624 && ! DECL_INLINE (fndecl))
4625 /* layout_decl may set this. */
4626 || TREE_ADDRESSABLE (parm)
4627 || TREE_SIDE_EFFECTS (parm)
4628 /* If -ffloat-store specified, don't put explicit
4629 float variables into registers. */
4630 || (flag_float_store
4631 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4632 {
4633 /* We can't use nominal_mode, because it will have been set to
4634 Pmode above. We must use the actual mode of the parm. */
4635 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4636 mark_user_reg (parmreg);
4637 emit_move_insn (parmreg, DECL_RTL (parm));
4638 DECL_RTL (parm) = parmreg;
4639 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4640 now the parm. */
4641 stack_parm = 0;
4642 }
4643 #ifdef FUNCTION_ARG_CALLEE_COPIES
4644 /* If we are passed an arg by reference and it is our responsibility
4645 to make a copy, do it now.
4646 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4647 original argument, so we must recreate them in the call to
4648 FUNCTION_ARG_CALLEE_COPIES. */
4649 /* ??? Later add code to handle the case where the argument isn't
4650 modified, so that the copy can be skipped. */
4651
4652 else if (passed_pointer
4653 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4654 TYPE_MODE (DECL_ARG_TYPE (parm)),
4655 DECL_ARG_TYPE (parm),
4656 named_arg)
4657 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4658 {
4659 rtx copy;
4660 tree type = DECL_ARG_TYPE (parm);
4661
4662 /* This sequence may involve a library call perhaps clobbering
4663 registers that haven't been copied to pseudos yet. */
4664
4665 push_to_sequence (conversion_insns);
4666
4667 if (!COMPLETE_TYPE_P (type)
4668 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4669 /* This is a variable sized object. */
4670 copy = gen_rtx_MEM (BLKmode,
4671 allocate_dynamic_stack_space
4672 (expr_size (parm), NULL_RTX,
4673 TYPE_ALIGN (type)));
4674 else
4675 copy = assign_stack_temp (TYPE_MODE (type),
4676 int_size_in_bytes (type), 1);
4677 set_mem_attributes (copy, parm, 1);
4678
4679 store_expr (parm, copy, 0);
4680 emit_move_insn (parmreg, XEXP (copy, 0));
4681 if (current_function_check_memory_usage)
4682 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4683 XEXP (copy, 0), Pmode,
4684 GEN_INT (int_size_in_bytes (type)),
4685 TYPE_MODE (sizetype),
4686 GEN_INT (MEMORY_USE_RW),
4687 TYPE_MODE (integer_type_node));
4688 conversion_insns = get_insns ();
4689 did_conversion = 1;
4690 end_sequence ();
4691 }
4692 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4693
4694 /* In any case, record the parm's desired stack location
4695 in case we later discover it must live in the stack.
4696
4697 If it is a COMPLEX value, store the stack location for both
4698 halves. */
4699
4700 if (GET_CODE (parmreg) == CONCAT)
4701 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4702 else
4703 regno = REGNO (parmreg);
4704
4705 if (regno >= max_parm_reg)
4706 {
4707 rtx *new;
4708 int old_max_parm_reg = max_parm_reg;
4709
4710 /* It's slow to expand this one register at a time,
4711 but it's also rare and we need max_parm_reg to be
4712 precisely correct. */
4713 max_parm_reg = regno + 1;
4714 new = (rtx *) xrealloc (parm_reg_stack_loc,
4715 max_parm_reg * sizeof (rtx));
4716 bzero ((char *) (new + old_max_parm_reg),
4717 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4718 parm_reg_stack_loc = new;
4719 }
4720
4721 if (GET_CODE (parmreg) == CONCAT)
4722 {
4723 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4724
4725 regnor = REGNO (gen_realpart (submode, parmreg));
4726 regnoi = REGNO (gen_imagpart (submode, parmreg));
4727
4728 if (stack_parm != 0)
4729 {
4730 parm_reg_stack_loc[regnor]
4731 = gen_realpart (submode, stack_parm);
4732 parm_reg_stack_loc[regnoi]
4733 = gen_imagpart (submode, stack_parm);
4734 }
4735 else
4736 {
4737 parm_reg_stack_loc[regnor] = 0;
4738 parm_reg_stack_loc[regnoi] = 0;
4739 }
4740 }
4741 else
4742 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4743
4744 /* Mark the register as eliminable if we did no conversion
4745 and it was copied from memory at a fixed offset,
4746 and the arg pointer was not copied to a pseudo-reg.
4747 If the arg pointer is a pseudo reg or the offset formed
4748 an invalid address, such memory-equivalences
4749 as we make here would screw up life analysis for it. */
4750 if (nominal_mode == passed_mode
4751 && ! did_conversion
4752 && stack_parm != 0
4753 && GET_CODE (stack_parm) == MEM
4754 && stack_offset.var == 0
4755 && reg_mentioned_p (virtual_incoming_args_rtx,
4756 XEXP (stack_parm, 0)))
4757 {
4758 rtx linsn = get_last_insn ();
4759 rtx sinsn, set;
4760
4761 /* Mark complex types separately. */
4762 if (GET_CODE (parmreg) == CONCAT)
4763 /* Scan backwards for the set of the real and
4764 imaginary parts. */
4765 for (sinsn = linsn; sinsn != 0;
4766 sinsn = prev_nonnote_insn (sinsn))
4767 {
4768 set = single_set (sinsn);
4769 if (set != 0
4770 && SET_DEST (set) == regno_reg_rtx [regnoi])
4771 REG_NOTES (sinsn)
4772 = gen_rtx_EXPR_LIST (REG_EQUIV,
4773 parm_reg_stack_loc[regnoi],
4774 REG_NOTES (sinsn));
4775 else if (set != 0
4776 && SET_DEST (set) == regno_reg_rtx [regnor])
4777 REG_NOTES (sinsn)
4778 = gen_rtx_EXPR_LIST (REG_EQUIV,
4779 parm_reg_stack_loc[regnor],
4780 REG_NOTES (sinsn));
4781 }
4782 else if ((set = single_set (linsn)) != 0
4783 && SET_DEST (set) == parmreg)
4784 REG_NOTES (linsn)
4785 = gen_rtx_EXPR_LIST (REG_EQUIV,
4786 stack_parm, REG_NOTES (linsn));
4787 }
4788
4789 /* For pointer data type, suggest pointer register. */
4790 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4791 mark_reg_pointer (parmreg,
4792 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4793
4794 }
4795 else
4796 {
4797 /* Value must be stored in the stack slot STACK_PARM
4798 during function execution. */
4799
4800 if (promoted_mode != nominal_mode)
4801 {
4802 /* Conversion is required. */
4803 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4804
4805 emit_move_insn (tempreg, validize_mem (entry_parm));
4806
4807 push_to_sequence (conversion_insns);
4808 entry_parm = convert_to_mode (nominal_mode, tempreg,
4809 TREE_UNSIGNED (TREE_TYPE (parm)));
4810 if (stack_parm)
4811 {
4812 /* ??? This may need a big-endian conversion on sparc64. */
4813 stack_parm = change_address (stack_parm, nominal_mode,
4814 NULL_RTX);
4815 }
4816 conversion_insns = get_insns ();
4817 did_conversion = 1;
4818 end_sequence ();
4819 }
4820
4821 if (entry_parm != stack_parm)
4822 {
4823 if (stack_parm == 0)
4824 {
4825 stack_parm
4826 = assign_stack_local (GET_MODE (entry_parm),
4827 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4828 set_mem_attributes (stack_parm, parm, 1);
4829 }
4830
4831 if (promoted_mode != nominal_mode)
4832 {
4833 push_to_sequence (conversion_insns);
4834 emit_move_insn (validize_mem (stack_parm),
4835 validize_mem (entry_parm));
4836 conversion_insns = get_insns ();
4837 end_sequence ();
4838 }
4839 else
4840 emit_move_insn (validize_mem (stack_parm),
4841 validize_mem (entry_parm));
4842 }
4843 if (current_function_check_memory_usage)
4844 {
4845 push_to_sequence (conversion_insns);
4846 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4847 XEXP (stack_parm, 0), Pmode,
4848 GEN_INT (GET_MODE_SIZE (GET_MODE
4849 (entry_parm))),
4850 TYPE_MODE (sizetype),
4851 GEN_INT (MEMORY_USE_RW),
4852 TYPE_MODE (integer_type_node));
4853
4854 conversion_insns = get_insns ();
4855 end_sequence ();
4856 }
4857 DECL_RTL (parm) = stack_parm;
4858 }
4859
4860 /* If this "parameter" was the place where we are receiving the
4861 function's incoming structure pointer, set up the result. */
4862 if (parm == function_result_decl)
4863 {
4864 tree result = DECL_RESULT (fndecl);
4865
4866 DECL_RTL (result)
4867 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4868
4869 set_mem_attributes (DECL_RTL (result), result, 1);
4870 }
4871 }
4872
4873 /* Output all parameter conversion instructions (possibly including calls)
4874 now that all parameters have been copied out of hard registers. */
4875 emit_insns (conversion_insns);
4876
4877 last_parm_insn = get_last_insn ();
4878
4879 current_function_args_size = stack_args_size.constant;
4880
4881 /* Adjust function incoming argument size for alignment and
4882 minimum length. */
4883
4884 #ifdef REG_PARM_STACK_SPACE
4885 #ifndef MAYBE_REG_PARM_STACK_SPACE
4886 current_function_args_size = MAX (current_function_args_size,
4887 REG_PARM_STACK_SPACE (fndecl));
4888 #endif
4889 #endif
4890
4891 #ifdef STACK_BOUNDARY
4892 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4893
4894 current_function_args_size
4895 = ((current_function_args_size + STACK_BYTES - 1)
4896 / STACK_BYTES) * STACK_BYTES;
4897 #endif
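/* Editorial worked example: with STACK_BOUNDARY == 64, STACK_BYTES is 8,
   so an argument block of 20 bytes is rounded up to
   ((20 + 8 - 1) / 8) * 8 == 24 bytes.  */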
4898
4899 #ifdef ARGS_GROW_DOWNWARD
4900 current_function_arg_offset_rtx
4901 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4902 : expand_expr (size_diffop (stack_args_size.var,
4903 size_int (-stack_args_size.constant)),
4904 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4905 #else
4906 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4907 #endif
4908
4909 /* See how many bytes, if any, of its args a function should try to pop
4910 on return. */
4911
4912 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4913 current_function_args_size);
4914
4915 /* For a stdarg.h function, save info about
4916 regs and stack space used by the named args. */
4917
4918 if (!hide_last_arg)
4919 current_function_args_info = args_so_far;
4920
4921 /* Set the rtx used for the function return value. Put this in its
4922 own variable so any optimizers that need this information don't have
4923 to include tree.h. Do this here so it gets done when an inlined
4924 function gets output. */
4925
4926 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4927 }
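/* Editorial summary of the loop above: on exit every PARM_DECL has
   DECL_INCOMING_RTL describing how it arrived (hard register, PARALLEL or
   stack MEM) and DECL_RTL describing where it lives in the body of the
   function (a pseudo register, a stack slot, or a MEM through a pseudo for
   parameters passed by invisible reference).  */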
4928 \f
4929 /* Indicate whether REGNO is an incoming argument to the current function
4930 that was promoted to a wider mode. If so, return the RTX for the
4931 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4932 that REGNO is promoted from and whether the promotion was signed or
4933 unsigned. */
4934
4935 #ifdef PROMOTE_FUNCTION_ARGS
4936
4937 rtx
4938 promoted_input_arg (regno, pmode, punsignedp)
4939 unsigned int regno;
4940 enum machine_mode *pmode;
4941 int *punsignedp;
4942 {
4943 tree arg;
4944
4945 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4946 arg = TREE_CHAIN (arg))
4947 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4948 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4949 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4950 {
4951 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4952 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4953
4954 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4955 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4956 && mode != DECL_MODE (arg))
4957 {
4958 *pmode = DECL_MODE (arg);
4959 *punsignedp = unsignedp;
4960 return DECL_INCOMING_RTL (arg);
4961 }
4962 }
4963
4964 return 0;
4965 }
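/* Editorial example: on a target whose PROMOTE_FUNCTION_ARGS widens
   sub-word integers, a `short' parameter can arrive in an SImode hard
   register; for that register number this function returns the incoming
   REG, sets *PMODE to HImode and *PUNSIGNEDP to the signedness of the
   declared type.  */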
4966
4967 #endif
4968 \f
4969 /* Compute the size and offset from the start of the stacked arguments for a
4970 parm passed in mode PASSED_MODE and with type TYPE.
4971
4972 INITIAL_OFFSET_PTR points to the current offset into the stacked
4973 arguments.
4974
4975 The starting offset and size for this parm are returned in *OFFSET_PTR
4976 and *ARG_SIZE_PTR, respectively.
4977
4978 IN_REGS is non-zero if the argument will be passed in registers. It will
4979 never be set if REG_PARM_STACK_SPACE is not defined.
4980
4981 FNDECL is the function in which the argument was defined.
4982
4983 There are two types of rounding that are done. The first, controlled by
4984 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4985 list to be aligned to the specific boundary (in bits). This rounding
4986 affects the initial and starting offsets, but not the argument size.
4987
4988 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4989 optionally rounds the size of the parm to PARM_BOUNDARY. The
4990 initial offset is not affected by this rounding, while the size always
4991 is and the starting offset may be. */
4992
4993 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
4994 initial_offset_ptr is positive because locate_and_pad_parm's
4995 callers pass in the total size of args so far as
4996 initial_offset_ptr. arg_size_ptr is always positive. */
4997
4998 void
4999 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5000 initial_offset_ptr, offset_ptr, arg_size_ptr,
5001 alignment_pad)
5002 enum machine_mode passed_mode;
5003 tree type;
5004 int in_regs ATTRIBUTE_UNUSED;
5005 tree fndecl ATTRIBUTE_UNUSED;
5006 struct args_size *initial_offset_ptr;
5007 struct args_size *offset_ptr;
5008 struct args_size *arg_size_ptr;
5009 struct args_size *alignment_pad;
5010
5011 {
5012 tree sizetree
5013 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5014 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5015 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5016
5017 #ifdef REG_PARM_STACK_SPACE
5018 /* If we have found a stack parm before we reach the end of the
5019 area reserved for registers, skip that area. */
5020 if (! in_regs)
5021 {
5022 int reg_parm_stack_space = 0;
5023
5024 #ifdef MAYBE_REG_PARM_STACK_SPACE
5025 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5026 #else
5027 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5028 #endif
5029 if (reg_parm_stack_space > 0)
5030 {
5031 if (initial_offset_ptr->var)
5032 {
5033 initial_offset_ptr->var
5034 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5035 ssize_int (reg_parm_stack_space));
5036 initial_offset_ptr->constant = 0;
5037 }
5038 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5039 initial_offset_ptr->constant = reg_parm_stack_space;
5040 }
5041 }
5042 #endif /* REG_PARM_STACK_SPACE */
5043
5044 arg_size_ptr->var = 0;
5045 arg_size_ptr->constant = 0;
5046
5047 #ifdef ARGS_GROW_DOWNWARD
5048 if (initial_offset_ptr->var)
5049 {
5050 offset_ptr->constant = 0;
5051 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5052 initial_offset_ptr->var);
5053 }
5054 else
5055 {
5056 offset_ptr->constant = - initial_offset_ptr->constant;
5057 offset_ptr->var = 0;
5058 }
5059 if (where_pad != none
5060 && (TREE_CODE (sizetree) != INTEGER_CST
5061 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5062 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5063 SUB_PARM_SIZE (*offset_ptr, sizetree);
5064 if (where_pad != downward)
5065 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5066 if (initial_offset_ptr->var)
5067 arg_size_ptr->var = size_binop (MINUS_EXPR,
5068 size_binop (MINUS_EXPR,
5069 ssize_int (0),
5070 initial_offset_ptr->var),
5071 offset_ptr->var);
5072
5073 else
5074 arg_size_ptr->constant = (- initial_offset_ptr->constant
5075 - offset_ptr->constant);
5076
5077 #else /* !ARGS_GROW_DOWNWARD */
5078 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5079 *offset_ptr = *initial_offset_ptr;
5080
5081 #ifdef PUSH_ROUNDING
5082 if (passed_mode != BLKmode)
5083 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5084 #endif
5085
5086 /* Pad_below needs the pre-rounded size to know how much to pad below
5087 so this must be done before rounding up. */
5088 if (where_pad == downward
5089 /* However, BLKmode args passed in regs have their padding done elsewhere.
5090 The stack slot must be able to hold the entire register. */
5091 && !(in_regs && passed_mode == BLKmode))
5092 pad_below (offset_ptr, passed_mode, sizetree);
5093
5094 if (where_pad != none
5095 && (TREE_CODE (sizetree) != INTEGER_CST
5096 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5097 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5098
5099 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5100 #endif /* ARGS_GROW_DOWNWARD */
5101 }
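/* Editorial worked example (upward-growing args, illustrative values):
   with PARM_BOUNDARY == 32 and FUNCTION_ARG_BOUNDARY == 32, a 6-byte
   BLKmode argument at an initial offset of 4 keeps offset 4 (already
   aligned); assuming the target pads it (where_pad != none), its size is
   rounded up to 8 bytes, so the caller's running total advances to 12 for
   the next argument.  */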
5102
5103 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5104 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5105
5106 static void
5107 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5108 struct args_size *offset_ptr;
5109 int boundary;
5110 struct args_size *alignment_pad;
5111 {
5112 tree save_var = NULL_TREE;
5113 HOST_WIDE_INT save_constant = 0;
5114
5115 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5116
5117 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5118 {
5119 save_var = offset_ptr->var;
5120 save_constant = offset_ptr->constant;
5121 }
5122
5123 alignment_pad->var = NULL_TREE;
5124 alignment_pad->constant = 0;
5125
5126 if (boundary > BITS_PER_UNIT)
5127 {
5128 if (offset_ptr->var)
5129 {
5130 offset_ptr->var =
5131 #ifdef ARGS_GROW_DOWNWARD
5132 round_down
5133 #else
5134 round_up
5135 #endif
5136 (ARGS_SIZE_TREE (*offset_ptr),
5137 boundary / BITS_PER_UNIT);
5138 offset_ptr->constant = 0; /*?*/
5139 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5140 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5141 save_var);
5142 }
5143 else
5144 {
5145 offset_ptr->constant =
5146 #ifdef ARGS_GROW_DOWNWARD
5147 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5148 #else
5149 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5150 #endif
5151 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5152 alignment_pad->constant = offset_ptr->constant - save_constant;
5153 }
5154 }
5155 }
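/* Editorial example: for boundary == 128 (16 bytes) and a constant offset
   of 20, CEIL_ROUND (20, 16) == 32 for upward-growing args (and
   FLOOR_ROUND (-20, 16) == -32 for downward-growing ones); when 128
   exceeds both PARM_BOUNDARY and STACK_BOUNDARY, the 12-byte adjustment is
   also recorded in *ALIGNMENT_PAD.  Assumes the usual CEIL_ROUND
   definition.  */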
5156
5157 #ifndef ARGS_GROW_DOWNWARD
5158 static void
5159 pad_below (offset_ptr, passed_mode, sizetree)
5160 struct args_size *offset_ptr;
5161 enum machine_mode passed_mode;
5162 tree sizetree;
5163 {
5164 if (passed_mode != BLKmode)
5165 {
5166 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5167 offset_ptr->constant
5168 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5169 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5170 - GET_MODE_SIZE (passed_mode));
5171 }
5172 else
5173 {
5174 if (TREE_CODE (sizetree) != INTEGER_CST
5175 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5176 {
5177 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5178 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5179 /* Add it in. */
5180 ADD_PARM_SIZE (*offset_ptr, s2);
5181 SUB_PARM_SIZE (*offset_ptr, sizetree);
5182 }
5183 }
5184 }
5185 #endif
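/* Editorial worked example: with PARM_BOUNDARY == 32, an HImode value
   (16 bits, 2 bytes) padded downward has its offset bumped by
   ((16 + 31) / 32) * 32 / 8 - 2 == 2 bytes, so it occupies the upper half
   of its 4-byte slot.  */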
5186 \f
5187 /* Walk the tree of blocks describing the binding levels within a function
5188 and warn about uninitialized variables.
5189 This is done after calling flow_analysis and before global_alloc
5190 clobbers the pseudo-regs to hard regs. */
5191
5192 void
5193 uninitialized_vars_warning (block)
5194 tree block;
5195 {
5196 register tree decl, sub;
5197 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5198 {
5199 if (warn_uninitialized
5200 && TREE_CODE (decl) == VAR_DECL
5201 /* These warnings are unreliable for aggregates
5202 because assigning the fields one by one can fail to convince
5203 flow.c that the entire aggregate was initialized.
5204 Unions are troublesome because members may be shorter. */
5205 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5206 && DECL_RTL (decl) != 0
5207 && GET_CODE (DECL_RTL (decl)) == REG
5208 /* Global optimizations can make it difficult to determine if a
5209 particular variable has been initialized. However, a VAR_DECL
5210 with a nonzero DECL_INITIAL had an initializer, so do not
5211 claim it is potentially uninitialized.
5212
5213 We do not care about the actual value in DECL_INITIAL, so we do
5214 not worry that it may be a dangling pointer. */
5215 && DECL_INITIAL (decl) == NULL_TREE
5216 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5217 warning_with_decl (decl,
5218 "`%s' might be used uninitialized in this function");
5219 if (extra_warnings
5220 && TREE_CODE (decl) == VAR_DECL
5221 && DECL_RTL (decl) != 0
5222 && GET_CODE (DECL_RTL (decl)) == REG
5223 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5224 warning_with_decl (decl,
5225 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5226 }
5227 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5228 uninitialized_vars_warning (sub);
5229 }
5230
5231 /* Do the appropriate part of uninitialized_vars_warning
5232 but for arguments instead of local variables. */
5233
5234 void
5235 setjmp_args_warning ()
5236 {
5237 register tree decl;
5238 for (decl = DECL_ARGUMENTS (current_function_decl);
5239 decl; decl = TREE_CHAIN (decl))
5240 if (DECL_RTL (decl) != 0
5241 && GET_CODE (DECL_RTL (decl)) == REG
5242 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5243 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5244 }
5245
5246 /* If this function calls setjmp, put all vars into the stack
5247 unless they were declared `register'. */
5248
5249 void
5250 setjmp_protect (block)
5251 tree block;
5252 {
5253 register tree decl, sub;
5254 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5255 if ((TREE_CODE (decl) == VAR_DECL
5256 || TREE_CODE (decl) == PARM_DECL)
5257 && DECL_RTL (decl) != 0
5258 && (GET_CODE (DECL_RTL (decl)) == REG
5259 || (GET_CODE (DECL_RTL (decl)) == MEM
5260 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5261 /* If this variable came from an inline function, it must be
5262 that its life doesn't overlap the setjmp. If there was a
5263 setjmp in the function, it would already be in memory. We
5264 must exclude such variables because their DECL_RTL might be
5265 set to strange things such as virtual_stack_vars_rtx. */
5266 && ! DECL_FROM_INLINE (decl)
5267 && (
5268 #ifdef NON_SAVING_SETJMP
5269 /* If longjmp doesn't restore the registers,
5270 don't put anything in them. */
5271 NON_SAVING_SETJMP
5272 ||
5273 #endif
5274 ! DECL_REGISTER (decl)))
5275 put_var_into_stack (decl);
5276 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5277 setjmp_protect (sub);
5278 }
5279 \f
5280 /* Like the previous function, but for args instead of local variables. */
5281
5282 void
5283 setjmp_protect_args ()
5284 {
5285 register tree decl;
5286 for (decl = DECL_ARGUMENTS (current_function_decl);
5287 decl; decl = TREE_CHAIN (decl))
5288 if ((TREE_CODE (decl) == VAR_DECL
5289 || TREE_CODE (decl) == PARM_DECL)
5290 && DECL_RTL (decl) != 0
5291 && (GET_CODE (DECL_RTL (decl)) == REG
5292 || (GET_CODE (DECL_RTL (decl)) == MEM
5293 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5294 && (
5295 /* If longjmp doesn't restore the registers,
5296 don't put anything in them. */
5297 #ifdef NON_SAVING_SETJMP
5298 NON_SAVING_SETJMP
5299 ||
5300 #endif
5301 ! DECL_REGISTER (decl)))
5302 put_var_into_stack (decl);
5303 }
5304 \f
5305 /* Return the context-pointer register corresponding to DECL,
5306 or 0 if it does not need one. */
5307
5308 rtx
5309 lookup_static_chain (decl)
5310 tree decl;
5311 {
5312 tree context = decl_function_context (decl);
5313 tree link;
5314
5315 if (context == 0
5316 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5317 return 0;
5318
5319 /* We treat inline_function_decl as an alias for the current function
5320 because that is the inline function whose vars, types, etc.
5321 are being merged into the current function.
5322 See expand_inline_function. */
5323 if (context == current_function_decl || context == inline_function_decl)
5324 return virtual_stack_vars_rtx;
5325
5326 for (link = context_display; link; link = TREE_CHAIN (link))
5327 if (TREE_PURPOSE (link) == context)
5328 return RTL_EXPR_RTL (TREE_VALUE (link));
5329
5330 abort ();
5331 }
5332 \f
5333 /* Convert a stack slot address ADDR for variable VAR
5334 (from a containing function)
5335 into an address valid in this function (using a static chain). */
5336
5337 rtx
5338 fix_lexical_addr (addr, var)
5339 rtx addr;
5340 tree var;
5341 {
5342 rtx basereg;
5343 HOST_WIDE_INT displacement;
5344 tree context = decl_function_context (var);
5345 struct function *fp;
5346 rtx base = 0;
5347
5348 /* If this is the present function, we need not do anything. */
5349 if (context == current_function_decl || context == inline_function_decl)
5350 return addr;
5351
5352 for (fp = outer_function_chain; fp; fp = fp->next)
5353 if (fp->decl == context)
5354 break;
5355
5356 if (fp == 0)
5357 abort ();
5358
5359 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5360 addr = XEXP (XEXP (addr, 0), 0);
5361
5362 /* Decode given address as base reg plus displacement. */
5363 if (GET_CODE (addr) == REG)
5364 basereg = addr, displacement = 0;
5365 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5366 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5367 else
5368 abort ();
5369
5370 /* We accept vars reached via the containing function's
5371 incoming arg pointer and via its stack variables pointer. */
5372 if (basereg == fp->internal_arg_pointer)
5373 {
5374 /* If reached via arg pointer, get the arg pointer value
5375 out of that function's stack frame.
5376
5377 There are two cases: If a separate ap is needed, allocate a
5378 slot in the outer function for it and dereference it that way.
5379 This is correct even if the real ap is actually a pseudo.
5380 Otherwise, just adjust the offset from the frame pointer to
5381 compensate. */
5382
5383 #ifdef NEED_SEPARATE_AP
5384 rtx addr;
5385
5386 if (fp->x_arg_pointer_save_area == 0)
5387 fp->x_arg_pointer_save_area
5388 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5389
5390 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5391 addr = memory_address (Pmode, addr);
5392
5393 base = gen_rtx_MEM (Pmode, addr);
5394 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5395 base = copy_to_reg (base);
5396 #else
5397 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5398 base = lookup_static_chain (var);
5399 #endif
5400 }
5401
5402 else if (basereg == virtual_stack_vars_rtx)
5403 {
5404 /* This is the same code as lookup_static_chain, duplicated here to
5405 avoid an extra call to decl_function_context. */
5406 tree link;
5407
5408 for (link = context_display; link; link = TREE_CHAIN (link))
5409 if (TREE_PURPOSE (link) == context)
5410 {
5411 base = RTL_EXPR_RTL (TREE_VALUE (link));
5412 break;
5413 }
5414 }
5415
5416 if (base == 0)
5417 abort ();
5418
5419 /* Use same offset, relative to appropriate static chain or argument
5420 pointer. */
5421 return plus_constant (base, displacement);
5422 }
5423 \f
5424 /* Return the address of the trampoline for entering nested fn FUNCTION.
5425 If necessary, allocate a trampoline (in the stack frame)
5426 and emit rtl to initialize its contents (at entry to this function). */
5427
5428 rtx
5429 trampoline_address (function)
5430 tree function;
5431 {
5432 tree link;
5433 tree rtlexp;
5434 rtx tramp;
5435 struct function *fp;
5436 tree fn_context;
5437
5438 /* Find an existing trampoline and return it. */
5439 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5440 if (TREE_PURPOSE (link) == function)
5441 return
5442 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5443
5444 for (fp = outer_function_chain; fp; fp = fp->next)
5445 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5446 if (TREE_PURPOSE (link) == function)
5447 {
5448 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5449 function);
5450 return round_trampoline_addr (tramp);
5451 }
5452
5453 /* None exists; we must make one. */
5454
5455 /* Find the `struct function' for the function containing FUNCTION. */
5456 fp = 0;
5457 fn_context = decl_function_context (function);
5458 if (fn_context != current_function_decl
5459 && fn_context != inline_function_decl)
5460 for (fp = outer_function_chain; fp; fp = fp->next)
5461 if (fp->decl == fn_context)
5462 break;
5463
5464 /* Allocate run-time space for this trampoline
5465 (usually in the defining function's stack frame). */
5466 #ifdef ALLOCATE_TRAMPOLINE
5467 tramp = ALLOCATE_TRAMPOLINE (fp);
5468 #else
5469 /* If rounding needed, allocate extra space
5470 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5471 #ifdef TRAMPOLINE_ALIGNMENT
5472 #define TRAMPOLINE_REAL_SIZE \
5473 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5474 #else
5475 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5476 #endif
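/* For instance, were TRAMPOLINE_ALIGNMENT 32 bits (4 bytes), the slot
allocated below would be TRAMPOLINE_SIZE + 3 bytes, so that rounding the
trampoline's start address up to the boundary still leaves TRAMPOLINE_SIZE
usable bytes. */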
5477 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5478 fp ? fp : cfun);
5479 #endif
5480
5481 /* Record the trampoline for reuse and note it for later initialization
5482 by expand_function_end. */
5483 if (fp != 0)
5484 {
5485 push_obstacks (fp->function_maybepermanent_obstack,
5486 fp->function_maybepermanent_obstack);
5487 rtlexp = make_node (RTL_EXPR);
5488 RTL_EXPR_RTL (rtlexp) = tramp;
5489 fp->x_trampoline_list = tree_cons (function, rtlexp,
5490 fp->x_trampoline_list);
5491 pop_obstacks ();
5492 }
5493 else
5494 {
5495 /* Make the RTL_EXPR node temporary, not momentary, so that the
5496 trampoline_list doesn't become garbage. */
5497 int momentary = suspend_momentary ();
5498 rtlexp = make_node (RTL_EXPR);
5499 resume_momentary (momentary);
5500
5501 RTL_EXPR_RTL (rtlexp) = tramp;
5502 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5503 }
5504
5505 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5506 return round_trampoline_addr (tramp);
5507 }
5508
5509 /* Given a trampoline address,
5510 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5511
5512 static rtx
5513 round_trampoline_addr (tramp)
5514 rtx tramp;
5515 {
5516 #ifdef TRAMPOLINE_ALIGNMENT
5517 /* Round address up to desired boundary. */
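/* For example, assuming a 32-bit (4-byte) TRAMPOLINE_ALIGNMENT, an address
of 0x1006 becomes (0x1006 + 3) & -4 == 0x1008. */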
5518 rtx temp = gen_reg_rtx (Pmode);
5519 temp = expand_binop (Pmode, add_optab, tramp,
5520 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5521 temp, 0, OPTAB_LIB_WIDEN);
5522 tramp = expand_binop (Pmode, and_optab, temp,
5523 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5524 temp, 0, OPTAB_LIB_WIDEN);
5525 #endif
5526 return tramp;
5527 }
5528 \f
5529 /* Match up the current function's BLOCK nodes with its block notes.
5530 All of the function's BLOCK nodes, including those chained onto the
5531 first block, are collected into a vector in depth-first order, and each
5532 NOTE_INSN_BLOCK_BEG or NOTE_INSN_BLOCK_END note has its NOTE_BLOCK set
5533 to the corresponding BLOCK. The blocks come from DECL_INITIAL of the
5534 current function and the insns from its insn chain. */
5535
5536 void
5537 identify_blocks ()
5538 {
5539 int n_blocks;
5540 tree *block_vector, *last_block_vector;
5541 tree *block_stack;
5542 tree block = DECL_INITIAL (current_function_decl);
5543
5544 if (block == 0)
5545 return;
5546
5547 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5548 depth-first order. */
5549 block_vector = get_block_vector (block, &n_blocks);
5550 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5551
5552 last_block_vector = identify_blocks_1 (get_insns (),
5553 block_vector + 1,
5554 block_vector + n_blocks,
5555 block_stack);
5556
5557 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5558 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5559 if (0 && last_block_vector != block_vector + n_blocks)
5560 abort ();
5561
5562 free (block_vector);
5563 free (block_stack);
5564 }
5565
5566 /* Subroutine of identify_blocks. Do the block substitution on the
5567 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5568
5569 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5570 BLOCK_VECTOR is incremented for each block seen. */
5571
5572 static tree *
5573 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5574 rtx insns;
5575 tree *block_vector;
5576 tree *end_block_vector;
5577 tree *orig_block_stack;
5578 {
5579 rtx insn;
5580 tree *block_stack = orig_block_stack;
5581
5582 for (insn = insns; insn; insn = NEXT_INSN (insn))
5583 {
5584 if (GET_CODE (insn) == NOTE)
5585 {
5586 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5587 {
5588 tree b;
5589
5590 /* If there are more block notes than BLOCKs, something
5591 is badly wrong. */
5592 if (block_vector == end_block_vector)
5593 abort ();
5594
5595 b = *block_vector++;
5596 NOTE_BLOCK (insn) = b;
5597 *block_stack++ = b;
5598 }
5599 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5600 {
5601 /* If there are more NOTE_INSN_BLOCK_ENDs than
5602 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5603 if (block_stack == orig_block_stack)
5604 abort ();
5605
5606 NOTE_BLOCK (insn) = *--block_stack;
5607 }
5608 }
5609 else if (GET_CODE (insn) == CALL_INSN
5610 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5611 {
5612 rtx cp = PATTERN (insn);
5613
5614 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5615 end_block_vector, block_stack);
5616 if (XEXP (cp, 1))
5617 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5618 end_block_vector, block_stack);
5619 if (XEXP (cp, 2))
5620 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5621 end_block_vector, block_stack);
5622 }
5623 }
5624
5625 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5626 something is badly wrong. */
5627 if (block_stack != orig_block_stack)
5628 abort ();
5629
5630 return block_vector;
5631 }
5632
5633 /* Identify BLOCKs referenced by more than one
5634 NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
5635
5636 void
5637 reorder_blocks ()
5638 {
5639 tree block = DECL_INITIAL (current_function_decl);
5640 varray_type block_stack;
5641
5642 if (block == NULL_TREE)
5643 return;
5644
5645 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5646
5647 /* Prune the old trees away, so that they don't get in the way. */
5648 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5649 BLOCK_CHAIN (block) = NULL_TREE;
5650
5651 reorder_blocks_1 (get_insns (), block, &block_stack);
5652
5653 BLOCK_SUBBLOCKS (block)
5654 = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5655
5656 VARRAY_FREE (block_stack);
5657 }
5658
5659 /* Helper function for reorder_blocks. Process the insn chain beginning
5660 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5661
5662 static void
5663 reorder_blocks_1 (insns, current_block, p_block_stack)
5664 rtx insns;
5665 tree current_block;
5666 varray_type *p_block_stack;
5667 {
5668 rtx insn;
5669
5670 for (insn = insns; insn; insn = NEXT_INSN (insn))
5671 {
5672 if (GET_CODE (insn) == NOTE)
5673 {
5674 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5675 {
5676 tree block = NOTE_BLOCK (insn);
5677 /* If we have seen this block before, copy it. */
5678 if (TREE_ASM_WRITTEN (block))
5679 {
5680 block = copy_node (block);
5681 NOTE_BLOCK (insn) = block;
5682 }
5683 BLOCK_SUBBLOCKS (block) = 0;
5684 TREE_ASM_WRITTEN (block) = 1;
5685 BLOCK_SUPERCONTEXT (block) = current_block;
5686 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5687 BLOCK_SUBBLOCKS (current_block) = block;
5688 current_block = block;
5689 VARRAY_PUSH_TREE (*p_block_stack, block);
5690 }
5691 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5692 {
5693 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5694 VARRAY_POP (*p_block_stack);
5695 BLOCK_SUBBLOCKS (current_block)
5696 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5697 current_block = BLOCK_SUPERCONTEXT (current_block);
5698 }
5699 }
5700 else if (GET_CODE (insn) == CALL_INSN
5701 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5702 {
5703 rtx cp = PATTERN (insn);
5704 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5705 if (XEXP (cp, 1))
5706 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5707 if (XEXP (cp, 2))
5708 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5709 }
5710 }
5711 }
5712
5713 /* Reverse the order of elements in the chain T of blocks,
5714 and return the new head of the chain (old last element). */
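/* For example, a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN) is
relinked in place to B3 -> B2 -> B1, and B3 is returned. */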
5715
5716 static tree
5717 blocks_nreverse (t)
5718 tree t;
5719 {
5720 register tree prev = 0, decl, next;
5721 for (decl = t; decl; decl = next)
5722 {
5723 next = BLOCK_CHAIN (decl);
5724 BLOCK_CHAIN (decl) = prev;
5725 prev = decl;
5726 }
5727 return prev;
5728 }
5729
5730 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5731 non-NULL, list them all into VECTOR, in a depth-first preorder
5732 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5733 blocks. */
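/* For instance, if BLOCK B1 has subblocks B2 and B4, and B2 in turn has
subblock B3, the vector is filled in the order B1, B2, B3, B4. */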
5734
5735 static int
5736 all_blocks (block, vector)
5737 tree block;
5738 tree *vector;
5739 {
5740 int n_blocks = 0;
5741
5742 while (block)
5743 {
5744 TREE_ASM_WRITTEN (block) = 0;
5745
5746 /* Record this block. */
5747 if (vector)
5748 vector[n_blocks] = block;
5749
5750 ++n_blocks;
5751
5752 /* Record the subblocks, and their subblocks... */
5753 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5754 vector ? vector + n_blocks : 0);
5755 block = BLOCK_CHAIN (block);
5756 }
5757
5758 return n_blocks;
5759 }
5760
5761 /* Return a vector containing all the blocks rooted at BLOCK. The
5762 number of elements in the vector is stored in N_BLOCKS_P. The
5763 vector is dynamically allocated; it is the caller's responsibility
5764 to call `free' on the pointer returned. */
5765
5766 static tree *
5767 get_block_vector (block, n_blocks_p)
5768 tree block;
5769 int *n_blocks_p;
5770 {
5771 tree *block_vector;
5772
5773 *n_blocks_p = all_blocks (block, NULL);
5774 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5775 all_blocks (block, block_vector);
5776
5777 return block_vector;
5778 }
5779
5780 static int next_block_index = 2;
5781
5782 /* Set BLOCK_NUMBER for all the blocks in FN. */
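/* The numbers are handed out consecutively from next_block_index, so they
normally keep increasing across functions; for SDB and XCOFF debugging the
count restarts at 1 in each function (see below). */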
5783
5784 void
5785 number_blocks (fn)
5786 tree fn;
5787 {
5788 int i;
5789 int n_blocks;
5790 tree *block_vector;
5791
5792 /* For SDB and XCOFF debugging output, we start numbering the blocks
5793 from 1 within each function, rather than keeping a running
5794 count. */
5795 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
5796 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
5797 next_block_index = 1;
5798 #endif
5799
5800 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
5801
5802 /* The top-level BLOCK isn't numbered at all. */
5803 for (i = 1; i < n_blocks; ++i)
5804 /* We number the blocks from two. */
5805 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
5806
5807 free (block_vector);
5808
5809 return;
5810 }
5811
5812 \f
5813 /* Allocate a function structure and reset its contents to the defaults. */
5814 static void
5815 prepare_function_start ()
5816 {
5817 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5818
5819 init_stmt_for_function ();
5820 init_eh_for_function ();
5821
5822 cse_not_expected = ! optimize;
5823
5824 /* Caller save not needed yet. */
5825 caller_save_needed = 0;
5826
5827 /* No stack slots have been made yet. */
5828 stack_slot_list = 0;
5829
5830 current_function_has_nonlocal_label = 0;
5831 current_function_has_nonlocal_goto = 0;
5832
5833 /* There is no stack slot for handling nonlocal gotos. */
5834 nonlocal_goto_handler_slots = 0;
5835 nonlocal_goto_stack_level = 0;
5836
5837 /* No labels have been declared for nonlocal use. */
5838 nonlocal_labels = 0;
5839 nonlocal_goto_handler_labels = 0;
5840
5841 /* No function calls so far in this function. */
5842 function_call_count = 0;
5843
5844 /* No parm regs have been allocated.
5845 (This is important for output_inline_function.) */
5846 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5847
5848 /* Initialize the RTL mechanism. */
5849 init_emit ();
5850
5851 /* Initialize the queue of pending postincrement and postdecrements,
5852 and some other info in expr.c. */
5853 init_expr ();
5854
5855 /* We haven't done register allocation yet. */
5856 reg_renumber = 0;
5857
5858 init_varasm_status (cfun);
5859
5860 /* Clear out data used for inlining. */
5861 cfun->inlinable = 0;
5862 cfun->original_decl_initial = 0;
5863 cfun->original_arg_vector = 0;
5864
5865 #ifdef STACK_BOUNDARY
5866 cfun->stack_alignment_needed = STACK_BOUNDARY;
5867 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5868 #else
5869 cfun->stack_alignment_needed = 0;
5870 cfun->preferred_stack_boundary = 0;
5871 #endif
5872
5873 /* Set if a call to setjmp is seen. */
5874 current_function_calls_setjmp = 0;
5875
5876 /* Set if a call to longjmp is seen. */
5877 current_function_calls_longjmp = 0;
5878
5879 current_function_calls_alloca = 0;
5880 current_function_contains_functions = 0;
5881 current_function_is_leaf = 0;
5882 current_function_nothrow = 0;
5883 current_function_sp_is_unchanging = 0;
5884 current_function_uses_only_leaf_regs = 0;
5885 current_function_has_computed_jump = 0;
5886 current_function_is_thunk = 0;
5887
5888 current_function_returns_pcc_struct = 0;
5889 current_function_returns_struct = 0;
5890 current_function_epilogue_delay_list = 0;
5891 current_function_uses_const_pool = 0;
5892 current_function_uses_pic_offset_table = 0;
5893 current_function_cannot_inline = 0;
5894
5895 /* We have not yet needed to make a label to jump to for tail-recursion. */
5896 tail_recursion_label = 0;
5897
5898 /* We haven't had a need to make a save area for ap yet. */
5899 arg_pointer_save_area = 0;
5900
5901 /* No stack slots allocated yet. */
5902 frame_offset = 0;
5903
5904 /* No SAVE_EXPRs in this function yet. */
5905 save_expr_regs = 0;
5906
5907 /* No RTL_EXPRs in this function yet. */
5908 rtl_expr_chain = 0;
5909
5910 /* Set up to allocate temporaries. */
5911 init_temp_slots ();
5912
5913 /* Indicate that we need to distinguish between the return value of the
5914 present function and the return value of a function being called. */
5915 rtx_equal_function_value_matters = 1;
5916
5917 /* Indicate that we have not instantiated virtual registers yet. */
5918 virtuals_instantiated = 0;
5919
5920 /* Indicate we have no need of a frame pointer yet. */
5921 frame_pointer_needed = 0;
5922
5923 /* By default assume not varargs or stdarg. */
5924 current_function_varargs = 0;
5925 current_function_stdarg = 0;
5926
5927 /* We haven't made any trampolines for this function yet. */
5928 trampoline_list = 0;
5929
5930 init_pending_stack_adjust ();
5931 inhibit_defer_pop = 0;
5932
5933 current_function_outgoing_args_size = 0;
5934
5935 if (init_lang_status)
5936 (*init_lang_status) (cfun);
5937 if (init_machine_status)
5938 (*init_machine_status) (cfun);
5939 }
5940
5941 /* Initialize the rtl expansion mechanism so that we can do simple things
5942 like generate sequences. This is used to provide a context during global
5943 initialization of some passes. */
5944 void
5945 init_dummy_function_start ()
5946 {
5947 prepare_function_start ();
5948 }
5949
5950 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5951 and initialize static variables for generating RTL for the statements
5952 of the function. */
5953
5954 void
5955 init_function_start (subr, filename, line)
5956 tree subr;
5957 const char *filename;
5958 int line;
5959 {
5960 prepare_function_start ();
5961
5962 /* Remember this function for later. */
5963 cfun->next_global = all_functions;
5964 all_functions = cfun;
5965
5966 current_function_name = (*decl_printable_name) (subr, 2);
5967 cfun->decl = subr;
5968
5969 /* Nonzero if this is a nested function that uses a static chain. */
5970
5971 current_function_needs_context
5972 = (decl_function_context (current_function_decl) != 0
5973 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5974
5975 /* Within the function body, compute a type's size as soon as it is laid out. */
5976 immediate_size_expand++;
5977
5978 /* Prevent ever trying to delete the first instruction of a function.
5979 Also tell final how to output a linenum before the function prologue.
5980 Note linenums could be missing, e.g. when compiling a Java .class file. */
5981 if (line > 0)
5982 emit_line_note (filename, line);
5983
5984 /* Make sure first insn is a note even if we don't want linenums.
5985 This makes sure the first insn will never be deleted.
5986 Also, final expects a note to appear there. */
5987 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5988
5989 /* Set flags used by final.c. */
5990 if (aggregate_value_p (DECL_RESULT (subr)))
5991 {
5992 #ifdef PCC_STATIC_STRUCT_RETURN
5993 current_function_returns_pcc_struct = 1;
5994 #endif
5995 current_function_returns_struct = 1;
5996 }
5997
5998 /* Warn if this value is an aggregate type,
5999 regardless of which calling convention we are using for it. */
6000 if (warn_aggregate_return
6001 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6002 warning ("function returns an aggregate");
6003
6004 current_function_returns_pointer
6005 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6006 }
6007
6008 /* Make sure all values used by the optimization passes have sane
6009 defaults. */
6010 void
6011 init_function_for_compilation ()
6012 {
6013 reg_renumber = 0;
6014
6015 /* No prologue/epilogue insns yet. */
6016 VARRAY_GROW (prologue, 0);
6017 VARRAY_GROW (epilogue, 0);
6018 VARRAY_GROW (sibcall_epilogue, 0);
6019 }
6020
6021 /* Indicate that the current function uses extra args
6022 not explicitly mentioned in the argument list in any fashion. */
6023
6024 void
6025 mark_varargs ()
6026 {
6027 current_function_varargs = 1;
6028 }
6029
6030 /* Expand a call to __main at the beginning of a possible main function. */
6031
6032 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6033 #undef HAS_INIT_SECTION
6034 #define HAS_INIT_SECTION
6035 #endif
6036
6037 void
6038 expand_main_function ()
6039 {
6040 #if !defined (HAS_INIT_SECTION)
6041 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6042 VOIDmode, 0);
6043 #endif /* not HAS_INIT_SECTION */
6044 }
6045 \f
6046 extern struct obstack permanent_obstack;
6047
6048 /* Start the RTL for a new function, and set variables used for
6049 emitting RTL.
6050 SUBR is the FUNCTION_DECL node.
6051 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6052 the function's parameters, which must be run at any return statement. */
6053
6054 void
6055 expand_function_start (subr, parms_have_cleanups)
6056 tree subr;
6057 int parms_have_cleanups;
6058 {
6059 tree tem;
6060 rtx last_ptr = NULL_RTX;
6061
6062 /* Make sure volatile mem refs aren't considered
6063 valid operands of arithmetic insns. */
6064 init_recog_no_volatile ();
6065
6066 /* Set this before generating any memory accesses. */
6067 current_function_check_memory_usage
6068 = (flag_check_memory_usage
6069 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6070
6071 current_function_instrument_entry_exit
6072 = (flag_instrument_function_entry_exit
6073 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6074
6075 current_function_limit_stack
6076 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6077
6078 /* If function gets a static chain arg, store it in the stack frame.
6079 Do this first, so it gets the first stack slot offset. */
6080 if (current_function_needs_context)
6081 {
6082 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6083
6084 /* Delay copying static chain if it is not a register to avoid
6085 conflicts with regs used for parameters. */
6086 if (! SMALL_REGISTER_CLASSES
6087 || GET_CODE (static_chain_incoming_rtx) == REG)
6088 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6089 }
6090
6091 /* If the parameters of this function need cleaning up, get a label
6092 for the beginning of the code which executes those cleanups. This must
6093 be done before doing anything with return_label. */
6094 if (parms_have_cleanups)
6095 cleanup_label = gen_label_rtx ();
6096 else
6097 cleanup_label = 0;
6098
6099 /* Make the label for return statements to jump to, if this machine
6100 does not have a one-instruction return and uses an epilogue,
6101 or if it returns a structure, or if it has parm cleanups. */
6102 #ifdef HAVE_return
6103 if (cleanup_label == 0 && HAVE_return
6104 && ! current_function_instrument_entry_exit
6105 && ! current_function_returns_pcc_struct
6106 && ! (current_function_returns_struct && ! optimize))
6107 return_label = 0;
6108 else
6109 return_label = gen_label_rtx ();
6110 #else
6111 return_label = gen_label_rtx ();
6112 #endif
6113
6114 /* Initialize rtx used to return the value. */
6115 /* Do this before assign_parms so that we copy the struct value address
6116 before any library calls that assign parms might generate. */
6117
6118 /* Decide whether to return the value in memory or in a register. */
6119 if (aggregate_value_p (DECL_RESULT (subr)))
6120 {
6121 /* Returning something that won't go in a register. */
6122 register rtx value_address = 0;
6123
6124 #ifdef PCC_STATIC_STRUCT_RETURN
6125 if (current_function_returns_pcc_struct)
6126 {
6127 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6128 value_address = assemble_static_space (size);
6129 }
6130 else
6131 #endif
6132 {
6133 /* Expect to be passed the address of a place to store the value.
6134 If it is passed as an argument, assign_parms will take care of
6135 it. */
6136 if (struct_value_incoming_rtx)
6137 {
6138 value_address = gen_reg_rtx (Pmode);
6139 emit_move_insn (value_address, struct_value_incoming_rtx);
6140 }
6141 }
6142 if (value_address)
6143 {
6144 DECL_RTL (DECL_RESULT (subr))
6145 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6146 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6147 DECL_RESULT (subr), 1);
6148 }
6149 }
6150 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6151 /* If return mode is void, this decl rtl should not be used. */
6152 DECL_RTL (DECL_RESULT (subr)) = 0;
6153 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6154 {
6155 /* If function will end with cleanup code for parms,
6156 compute the return value into a pseudo reg,
6157 which we will copy into the true return register
6158 after the cleanups are done. */
6159
6160 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6161
6162 #ifdef PROMOTE_FUNCTION_RETURN
6163 tree type = TREE_TYPE (DECL_RESULT (subr));
6164 int unsignedp = TREE_UNSIGNED (type);
6165
6166 mode = promote_mode (type, mode, &unsignedp, 1);
6167 #endif
6168
6169 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6170 }
6171 else
6172 /* Scalar, returned in a register. */
6173 {
6174 DECL_RTL (DECL_RESULT (subr))
6175 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
6176
6177 /* Mark this reg as the function's return value. */
6178 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6179 {
6180 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6181 /* Needed because we may need to move this to memory
6182 in case it's a named return value whose address is taken. */
6183 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6184 }
6185 }
6186
6187 /* Initialize rtx for parameters and local variables.
6188 In some cases this requires emitting insns. */
6189
6190 assign_parms (subr);
6191
6192 /* Copy the static chain now if it wasn't a register. The delay is to
6193 avoid conflicts with the parameter passing registers. */
6194
6195 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6196 if (GET_CODE (static_chain_incoming_rtx) != REG)
6197 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6198
6199 /* The following was moved from init_function_start.
6200 The move is supposed to make sdb output more accurate. */
6201 /* Indicate the beginning of the function body,
6202 as opposed to parm setup. */
6203 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6204
6205 if (GET_CODE (get_last_insn ()) != NOTE)
6206 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6207 parm_birth_insn = get_last_insn ();
6208
6209 context_display = 0;
6210 if (current_function_needs_context)
6211 {
6212 /* Fetch static chain values for containing functions. */
6213 tem = decl_function_context (current_function_decl);
6214 /* Copy the static chain pointer into a pseudo. If we have
6215 small register classes, copy the value from memory if
6216 static_chain_incoming_rtx is a REG. */
6217 if (tem)
6218 {
6219 /* If the static chain originally came in a register, put it back
6220 there, then move it out in the next insn. The reason for
6221 this peculiar code is to satisfy function integration. */
6222 if (SMALL_REGISTER_CLASSES
6223 && GET_CODE (static_chain_incoming_rtx) == REG)
6224 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6225 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6226 }
6227
6228 while (tem)
6229 {
6230 tree rtlexp = make_node (RTL_EXPR);
6231
6232 RTL_EXPR_RTL (rtlexp) = last_ptr;
6233 context_display = tree_cons (tem, rtlexp, context_display);
6234 tem = decl_function_context (tem);
6235 if (tem == 0)
6236 break;
6237 /* Chain thru stack frames, assuming pointer to next lexical frame
6238 is found at the place we always store it. */
6239 #ifdef FRAME_GROWS_DOWNWARD
6240 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6241 #endif
6242 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6243 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6244 last_ptr = copy_to_reg (last_ptr);
6245
6246 /* If we are not optimizing, ensure that we know that this
6247 piece of context is live over the entire function. */
6248 if (! optimize)
6249 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6250 save_expr_regs);
6251 }
6252 }
6253
6254 if (current_function_instrument_entry_exit)
6255 {
6256 rtx fun = DECL_RTL (current_function_decl);
6257 if (GET_CODE (fun) == MEM)
6258 fun = XEXP (fun, 0);
6259 else
6260 abort ();
6261 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6262 fun, Pmode,
6263 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6264 0,
6265 hard_frame_pointer_rtx),
6266 Pmode);
6267 }
6268
6269 /* After the display initializations is where the tail-recursion label
6270 should go, if we end up needing one. Ensure we have a NOTE here
6271 since some things (like trampolines) get placed before this. */
6272 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6273
6274 /* Evaluate now the sizes of any types declared among the arguments. */
6275 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6276 {
6277 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6278 EXPAND_MEMORY_USE_BAD);
6279 /* Flush the queue in case this parameter declaration has
6280 side-effects. */
6281 emit_queue ();
6282 }
6283
6284 /* Make sure there is a line number after the function entry setup code. */
6285 force_next_line_note ();
6286 }
6287 \f
6288 /* Undo the effects of init_dummy_function_start. */
6289 void
6290 expand_dummy_function_end ()
6291 {
6292 /* End any sequences that failed to be closed due to syntax errors. */
6293 while (in_sequence_p ())
6294 end_sequence ();
6295
6296 /* Outside function body, can't compute type's actual size
6297 until next function's body starts. */
6298
6299 free_after_parsing (cfun);
6300 free_after_compilation (cfun);
6301 free (cfun);
6302 cfun = 0;
6303 }
6304
6305 /* Call DOIT for each hard register used as a return value from
6306 the current function. */
6307
6308 void
6309 diddle_return_value (doit, arg)
6310 void (*doit) PARAMS ((rtx, void *));
6311 void *arg;
6312 {
6313 rtx outgoing = current_function_return_rtx;
6314 int pcc;
6315
6316 if (! outgoing)
6317 return;
6318
6319 pcc = (current_function_returns_struct
6320 || current_function_returns_pcc_struct);
6321
6322 if ((GET_CODE (outgoing) == REG
6323 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6324 || pcc)
6325 {
6326 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6327
6328 /* A PCC-style return returns a pointer to the memory in which
6329 the structure is stored. */
6330 if (pcc)
6331 type = build_pointer_type (type);
6332
6333 #ifdef FUNCTION_OUTGOING_VALUE
6334 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6335 #else
6336 outgoing = FUNCTION_VALUE (type, current_function_decl);
6337 #endif
6338 /* If this is a BLKmode structure being returned in registers, then use
6339 the mode computed in expand_return. */
6340 if (GET_MODE (outgoing) == BLKmode)
6341 PUT_MODE (outgoing,
6342 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6343 REG_FUNCTION_VALUE_P (outgoing) = 1;
6344 }
6345
6346 if (GET_CODE (outgoing) == REG)
6347 (*doit) (outgoing, arg);
6348 else if (GET_CODE (outgoing) == PARALLEL)
6349 {
6350 int i;
6351
6352 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6353 {
6354 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6355
6356 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6357 (*doit) (x, arg);
6358 }
6359 }
6360 }
6361
6362 static void
6363 do_clobber_return_reg (reg, arg)
6364 rtx reg;
6365 void *arg ATTRIBUTE_UNUSED;
6366 {
6367 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6368 }
6369
6370 void
6371 clobber_return_register ()
6372 {
6373 diddle_return_value (do_clobber_return_reg, NULL);
6374 }
6375
6376 static void
6377 do_use_return_reg (reg, arg)
6378 rtx reg;
6379 void *arg ATTRIBUTE_UNUSED;
6380 {
6381 emit_insn (gen_rtx_USE (VOIDmode, reg));
6382 }
6383
6384 void
6385 use_return_register ()
6386 {
6387 diddle_return_value (do_use_return_reg, NULL);
6388 }
6389
6390 /* Generate RTL for the end of the current function.
6391 FILENAME and LINE are the current position in the source file.
6392
6393 It is up to language-specific callers to do cleanups for parameters--
6394 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6395
6396 void
6397 expand_function_end (filename, line, end_bindings)
6398 const char *filename;
6399 int line;
6400 int end_bindings;
6401 {
6402 tree link;
6403
6404 #ifdef TRAMPOLINE_TEMPLATE
6405 static rtx initial_trampoline;
6406 #endif
6407
6408 finish_expr_for_function ();
6409
6410 #ifdef NON_SAVING_SETJMP
6411 /* Don't put any variables in registers if we call setjmp
6412 on a machine that fails to restore the registers. */
6413 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6414 {
6415 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6416 setjmp_protect (DECL_INITIAL (current_function_decl));
6417
6418 setjmp_protect_args ();
6419 }
6420 #endif
6421
6422 /* Save the argument pointer if a save area was made for it. */
6423 if (arg_pointer_save_area)
6424 {
6425 /* arg_pointer_save_area may not be a valid memory address, so we
6426 have to check it and fix it if necessary. */
6427 rtx seq;
6428 start_sequence ();
6429 emit_move_insn (validize_mem (arg_pointer_save_area),
6430 virtual_incoming_args_rtx);
6431 seq = gen_sequence ();
6432 end_sequence ();
6433 emit_insn_before (seq, tail_recursion_reentry);
6434 }
6435
6436 /* Initialize any trampolines required by this function. */
6437 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6438 {
6439 tree function = TREE_PURPOSE (link);
6440 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6441 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6442 #ifdef TRAMPOLINE_TEMPLATE
6443 rtx blktramp;
6444 #endif
6445 rtx seq;
6446
6447 #ifdef TRAMPOLINE_TEMPLATE
6448 /* First make sure this compilation has a template for
6449 initializing trampolines. */
6450 if (initial_trampoline == 0)
6451 {
6452 end_temporary_allocation ();
6453 initial_trampoline
6454 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6455 resume_temporary_allocation ();
6456
6457 ggc_add_rtx_root (&initial_trampoline, 1);
6458 }
6459 #endif
6460
6461 /* Generate insns to initialize the trampoline. */
6462 start_sequence ();
6463 tramp = round_trampoline_addr (XEXP (tramp, 0));
6464 #ifdef TRAMPOLINE_TEMPLATE
6465 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6466 emit_block_move (blktramp, initial_trampoline,
6467 GEN_INT (TRAMPOLINE_SIZE),
6468 TRAMPOLINE_ALIGNMENT);
6469 #endif
6470 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6471 seq = get_insns ();
6472 end_sequence ();
6473
6474 /* Put those insns at entry to the containing function (this one). */
6475 emit_insns_before (seq, tail_recursion_reentry);
6476 }
6477
6478 /* If we are doing stack checking and this function makes calls,
6479 do a stack probe at the start of the function to ensure we have enough
6480 space for another stack frame. */
6481 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6482 {
6483 rtx insn, seq;
6484
6485 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6486 if (GET_CODE (insn) == CALL_INSN)
6487 {
6488 start_sequence ();
6489 probe_stack_range (STACK_CHECK_PROTECT,
6490 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6491 seq = get_insns ();
6492 end_sequence ();
6493 emit_insns_before (seq, tail_recursion_reentry);
6494 break;
6495 }
6496 }
6497
6498 /* Warn about unused parms if extra warnings were specified. */
6499 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6500 warning. WARN_UNUSED_PARAMETER is negative when set by
6501 -Wunused. */
6502 if (warn_unused_parameter > 0
6503 || (warn_unused_parameter < 0 && extra_warnings))
6504 {
6505 tree decl;
6506
6507 for (decl = DECL_ARGUMENTS (current_function_decl);
6508 decl; decl = TREE_CHAIN (decl))
6509 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6510 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6511 warning_with_decl (decl, "unused parameter `%s'");
6512 }
6513
6514 /* Delete handlers for nonlocal gotos if nothing uses them. */
6515 if (nonlocal_goto_handler_slots != 0
6516 && ! current_function_has_nonlocal_label)
6517 delete_handlers ();
6518
6519 /* End any sequences that failed to be closed due to syntax errors. */
6520 while (in_sequence_p ())
6521 end_sequence ();
6522
6523 /* Outside function body, can't compute type's actual size
6524 until next function's body starts. */
6525 immediate_size_expand--;
6526
6527 clear_pending_stack_adjust ();
6528 do_pending_stack_adjust ();
6529
6530 /* Mark the end of the function body.
6531 If control reaches this insn, the function can drop through
6532 without returning a value. */
6533 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6534
6535 /* Must mark the last line number note in the function, so that the test
6536 coverage code can avoid counting the last line twice. This just tells
6537 the code to ignore the immediately following line note, since there
6538 already exists a copy of this note somewhere above. This line number
6539 note is still needed for debugging though, so we can't delete it. */
6540 if (flag_test_coverage)
6541 emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
6542
6543 /* Output a linenumber for the end of the function.
6544 SDB depends on this. */
6545 emit_line_note_force (filename, line);
6546
6547 /* Output the label for the actual return from the function,
6548 if one is expected. This happens either because a function epilogue
6549 is used instead of a return instruction, or because a return was done
6550 with a goto in order to run local cleanups, or because of pcc-style
6551 structure returning. */
6552
6553 if (return_label)
6554 {
6555 /* Before the return label, clobber the return registers so that
6556 they are not propagated live to the rest of the function. This
6557 can only happen with functions that drop through; if there had
6558 been a return statement, there would have either been a return
6559 rtx, or a jump to the return label. */
6560 clobber_return_register ();
6561
6562 emit_label (return_label);
6563 }
6564
6565 /* C++ uses this. */
6566 if (end_bindings)
6567 expand_end_bindings (0, 0, 0);
6568
6569 /* Now handle any leftover exception regions that may have been
6570 created for the parameters. */
6571 {
6572 rtx last = get_last_insn ();
6573 rtx label;
6574
6575 expand_leftover_cleanups ();
6576
6577 /* If there are any catch_clauses remaining, output them now. */
6578 emit_insns (catch_clauses);
6579 catch_clauses = catch_clauses_last = NULL_RTX;
6580 /* If the above emitted any code, make sure we jump around it. */
6581 if (last != get_last_insn ())
6582 {
6583 label = gen_label_rtx ();
6584 last = emit_jump_insn_after (gen_jump (label), last);
6585 last = emit_barrier_after (last);
6586 emit_label (label);
6587 }
6588 }
6589
6590 if (current_function_instrument_entry_exit)
6591 {
6592 rtx fun = DECL_RTL (current_function_decl);
6593 if (GET_CODE (fun) == MEM)
6594 fun = XEXP (fun, 0);
6595 else
6596 abort ();
6597 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6598 fun, Pmode,
6599 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6600 0,
6601 hard_frame_pointer_rtx),
6602 Pmode);
6603 }
6604
6605 /* If we had calls to alloca, and this machine needs
6606 an accurate stack pointer to exit the function,
6607 insert some code to save and restore the stack pointer. */
6608 #ifdef EXIT_IGNORE_STACK
6609 if (! EXIT_IGNORE_STACK)
6610 #endif
6611 if (current_function_calls_alloca)
6612 {
6613 rtx tem = 0;
6614
6615 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6616 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6617 }
6618
6619 /* If scalar return value was computed in a pseudo-reg,
6620 copy that to the hard return register. */
6621 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6622 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6623 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6624 >= FIRST_PSEUDO_REGISTER))
6625 {
6626 rtx real_decl_result;
6627
6628 #ifdef FUNCTION_OUTGOING_VALUE
6629 real_decl_result
6630 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6631 current_function_decl);
6632 #else
6633 real_decl_result
6634 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6635 current_function_decl);
6636 #endif
6637 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6638 /* If this is a BLKmode structure being returned in registers, then use
6639 the mode computed in expand_return. */
6640 if (GET_MODE (real_decl_result) == BLKmode)
6641 PUT_MODE (real_decl_result,
6642 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6643 emit_move_insn (real_decl_result,
6644 DECL_RTL (DECL_RESULT (current_function_decl)));
6645
6646 /* The delay slot scheduler assumes that current_function_return_rtx
6647 holds the hard register containing the return value, not a temporary
6648 pseudo. */
6649 current_function_return_rtx = real_decl_result;
6650 }
6651
6652 /* If returning a structure, arrange to return the address of the value
6653 in a place where debuggers expect to find it.
6654
6655 If returning a structure PCC style,
6656 the caller also depends on this value.
6657 And current_function_returns_pcc_struct is not necessarily set. */
6658 if (current_function_returns_struct
6659 || current_function_returns_pcc_struct)
6660 {
6661 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6662 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6663 #ifdef FUNCTION_OUTGOING_VALUE
6664 rtx outgoing
6665 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6666 current_function_decl);
6667 #else
6668 rtx outgoing
6669 = FUNCTION_VALUE (build_pointer_type (type),
6670 current_function_decl);
6671 #endif
6672
6673 /* Mark this as a function return value so integrate will delete the
6674 assignment and USE below when inlining this function. */
6675 REG_FUNCTION_VALUE_P (outgoing) = 1;
6676
6677 emit_move_insn (outgoing, value_address);
6678 }
6679
6680 /* ??? This should no longer be necessary since stupid is no longer with
6681 us, but there are some parts of the compiler (e.g. reload_combine, and
6682 sh mach_dep_reorg) that still try to compute their own lifetime info
6683 instead of using the general framework. */
6684 use_return_register ();
6685
6686 /* If this is an implementation of __throw, do what's necessary to
6687 communicate between __builtin_eh_return and the epilogue. */
6688 expand_eh_return ();
6689
6690 /* Output a return insn if we are using one.
6691 Otherwise, let the rtl chain end here, to drop through
6692 into the epilogue. */
6693
6694 #ifdef HAVE_return
6695 if (HAVE_return)
6696 {
6697 emit_jump_insn (gen_return ());
6698 emit_barrier ();
6699 }
6700 #endif
6701
6702 /* Fix up any gotos that jumped out to the outermost
6703 binding level of the function.
6704 Must follow emitting RETURN_LABEL. */
6705
6706 /* If you have any cleanups to do at this point,
6707 and they need to create temporary variables,
6708 then you will lose. */
6709 expand_fixups (get_insns ());
6710 }
6711 \f
6712 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6713 sequence or a single insn). */
6714
6715 static void
6716 record_insns (insns, vecp)
6717 rtx insns;
6718 varray_type *vecp;
6719 {
6720 if (GET_CODE (insns) == SEQUENCE)
6721 {
6722 int len = XVECLEN (insns, 0);
6723 int i = VARRAY_SIZE (*vecp);
6724
6725 VARRAY_GROW (*vecp, i + len);
6726 while (--len >= 0)
6727 {
6728 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6729 ++i;
6730 }
6731 }
6732 else
6733 {
6734 int i = VARRAY_SIZE (*vecp);
6735 VARRAY_GROW (*vecp, i + 1);
6736 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6737 }
6738 }
6739
6740 /* Determine how many INSN_UIDs in VEC are part of INSN. */
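/* For a SEQUENCE (e.g. a filled delay slot) this counts how many of the
sequence's elements have their INSN_UID recorded in VEC; for any other insn
the result is 1 or 0. */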
6741
6742 static int
6743 contains (insn, vec)
6744 rtx insn;
6745 varray_type vec;
6746 {
6747 register int i, j;
6748
6749 if (GET_CODE (insn) == INSN
6750 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6751 {
6752 int count = 0;
6753 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6754 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6755 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
6756 count++;
6757 return count;
6758 }
6759 else
6760 {
6761 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6762 if (INSN_UID (insn) == VARRAY_INT (vec, j))
6763 return 1;
6764 }
6765 return 0;
6766 }
6767
6768 int
6769 prologue_epilogue_contains (insn)
6770 rtx insn;
6771 {
6772 if (contains (insn, prologue))
6773 return 1;
6774 if (contains (insn, epilogue))
6775 return 1;
6776 return 0;
6777 }
6778
6779 int
6780 sibcall_epilogue_contains (insn)
6781 rtx insn;
6782 {
6783 if (sibcall_epilogue)
6784 return contains (insn, sibcall_epilogue);
6785 return 0;
6786 }
6787
6788 #ifdef HAVE_return
6789 /* Insert gen_return at the end of block BB. This also means updating
6790 block_for_insn appropriately. */
6791
6792 static void
6793 emit_return_into_block (bb, line_note)
6794 basic_block bb;
6795 rtx line_note;
6796 {
6797 rtx p, end;
6798
6799 p = NEXT_INSN (bb->end);
6800 end = emit_jump_insn_after (gen_return (), bb->end);
6801 if (line_note)
6802 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
6803 NOTE_LINE_NUMBER (line_note), bb->end);
6804
6805 while (1)
6806 {
6807 set_block_for_insn (p, bb);
6808 if (p == bb->end)
6809 break;
6810 p = PREV_INSN (p);
6811 }
6812 bb->end = end;
6813 }
6814 #endif /* HAVE_return */
6815
6816 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6817 this into place with notes indicating where the prologue ends and where
6818 the epilogue begins. Update the basic block information when possible. */
6819
6820 void
6821 thread_prologue_and_epilogue_insns (f)
6822 rtx f ATTRIBUTE_UNUSED;
6823 {
6824 int inserted = 0;
6825 edge e;
6826 rtx seq;
6827 #ifdef HAVE_prologue
6828 rtx prologue_end = NULL_RTX;
6829 #endif
6830 #if defined (HAVE_epilogue) || defined(HAVE_return)
6831 rtx epilogue_end = NULL_RTX;
6832 #endif
6833
6834 #ifdef HAVE_prologue
6835 if (HAVE_prologue)
6836 {
6837 start_sequence ();
6838 seq = gen_prologue ();
6839 emit_insn (seq);
6840
6841 /* Retain a map of the prologue insns. */
6842 if (GET_CODE (seq) != SEQUENCE)
6843 seq = get_insns ();
6844 record_insns (seq, &prologue);
6845 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6846
6847 seq = gen_sequence ();
6848 end_sequence ();
6849
6850 /* If optimization is off, and perhaps in an empty function,
6851 the entry block will have no successors. */
6852 if (ENTRY_BLOCK_PTR->succ)
6853 {
6854 /* Can't deal with multiple successors of the entry block. */
6855 if (ENTRY_BLOCK_PTR->succ->succ_next)
6856 abort ();
6857
6858 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6859 inserted = 1;
6860 }
6861 else
6862 emit_insn_after (seq, f);
6863 }
6864 #endif
6865
6866 /* If the exit block has no non-fake predecessors, we don't need
6867 an epilogue. */
6868 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6869 if ((e->flags & EDGE_FAKE) == 0)
6870 break;
6871 if (e == NULL)
6872 goto epilogue_done;
6873
6874 #ifdef HAVE_return
6875 if (optimize && HAVE_return)
6876 {
6877 /* If we're allowed to generate a simple return instruction,
6878 then by definition we don't need a full epilogue. Examine
6879 the block that falls through to EXIT. If it does not
6880 contain any code, examine its predecessors and try to
6881 emit (conditional) return instructions. */
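/* Concretely: an unconditional jump to that empty block is replaced
outright by a return insn, while a conditional jump to it has its label
operand changed to (return), making it a conditional return. */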
6882
6883 basic_block last;
6884 edge e_next;
6885 rtx label;
6886
6887 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6888 if (e->flags & EDGE_FALLTHRU)
6889 break;
6890 if (e == NULL)
6891 goto epilogue_done;
6892 last = e->src;
6893
6894 /* Verify that there are no active instructions in the last block. */
6895 label = last->end;
6896 while (label && GET_CODE (label) != CODE_LABEL)
6897 {
6898 if (active_insn_p (label))
6899 break;
6900 label = PREV_INSN (label);
6901 }
6902
6903 if (last->head == label && GET_CODE (label) == CODE_LABEL)
6904 {
6905 rtx epilogue_line_note = NULL_RTX;
6906
6907 /* Locate the line number associated with the closing brace,
6908 if we can find one. */
6909 for (seq = get_last_insn ();
6910 seq && ! active_insn_p (seq);
6911 seq = PREV_INSN (seq))
6912 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
6913 {
6914 epilogue_line_note = seq;
6915 break;
6916 }
6917
6918 for (e = last->pred; e ; e = e_next)
6919 {
6920 basic_block bb = e->src;
6921 rtx jump;
6922
6923 e_next = e->pred_next;
6924 if (bb == ENTRY_BLOCK_PTR)
6925 continue;
6926
6927 jump = bb->end;
6928 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
6929 continue;
6930
6931 /* If we have an unconditional jump, we can replace that
6932 with a simple return instruction. */
6933 if (simplejump_p (jump))
6934 {
6935 emit_return_into_block (bb, epilogue_line_note);
6936 flow_delete_insn (jump);
6937 }
6938
6939 /* If we have a conditional jump, we can try to replace
6940 that with a conditional return instruction. */
6941 else if (condjump_p (jump))
6942 {
6943 rtx ret, *loc;
6944
6945 ret = SET_SRC (PATTERN (jump));
6946 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
6947 loc = &XEXP (ret, 1);
6948 else
6949 loc = &XEXP (ret, 2);
6950 ret = gen_rtx_RETURN (VOIDmode);
6951
6952 if (! validate_change (jump, loc, ret, 0))
6953 continue;
6954 if (JUMP_LABEL (jump))
6955 LABEL_NUSES (JUMP_LABEL (jump))--;
6956
6957 /* If this block has only one successor, it both jumps
6958 and falls through to the fallthru block, so we can't
6959 delete the edge. */
6960 if (bb->succ->succ_next == NULL)
6961 continue;
6962 }
6963 else
6964 continue;
6965
6966 /* Fix up the CFG for the successful change we just made. */
6967 redirect_edge_succ (e, EXIT_BLOCK_PTR);
6968 }
6969
6970 /* Emit a return insn for the exit fallthru block. Whether
6971 this is still reachable will be determined later. */
6972
6973 emit_barrier_after (last->end);
6974 emit_return_into_block (last, epilogue_line_note);
6975 epilogue_end = last->end;
6976 goto epilogue_done;
6977 }
6978 }
6979 #endif
6980 #ifdef HAVE_epilogue
6981 if (HAVE_epilogue)
6982 {
6983 /* Find the edge that falls through to EXIT. Other edges may exist
6984 due to RETURN instructions, but those don't need epilogues.
6985 There really shouldn't be a mixture -- either all should have
6986 been converted or none, however... */
6987
6988 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6989 if (e->flags & EDGE_FALLTHRU)
6990 break;
6991 if (e == NULL)
6992 goto epilogue_done;
6993
6994 start_sequence ();
6995 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
6996
6997 seq = gen_epilogue ();
6998 emit_jump_insn (seq);
6999
7000 /* Retain a map of the epilogue insns. */
7001 if (GET_CODE (seq) != SEQUENCE)
7002 seq = get_insns ();
7003 record_insns (seq, &epilogue);
7004
7005 seq = gen_sequence ();
7006 end_sequence ();
7007
7008 insert_insn_on_edge (seq, e);
7009 inserted = 1;
7010 }
7011 #endif
7012 epilogue_done:
7013
7014 if (inserted)
7015 commit_edge_insertions ();
7016
7017 #ifdef HAVE_sibcall_epilogue
7018 /* Emit sibling epilogues before any sibling call sites. */
7019 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
7020 {
7021 basic_block bb = e->src;
7022 rtx insn = bb->end;
7023 rtx i;
7024 rtx newinsn;
7025
7026 if (GET_CODE (insn) != CALL_INSN
7027 || ! SIBLING_CALL_P (insn))
7028 continue;
7029
7030 start_sequence ();
7031 seq = gen_sibcall_epilogue ();
7032 end_sequence ();
7033
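/* Remember the insn just before the call so that, once the
   epilogue sequence has been emitted, we can walk the new insns
   and record their basic block below.  */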
7034 i = PREV_INSN (insn);
7035 newinsn = emit_insn_before (seq, insn);
7036
7037 /* Update the UID to basic block map. */
7038 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7039 set_block_for_insn (i, bb);
7040
7041 /* Retain a map of the epilogue insns. Used in life analysis to
7042 avoid getting rid of sibcall epilogue insns. */
7043 record_insns (GET_CODE (seq) == SEQUENCE
7044 ? seq : newinsn, &sibcall_epilogue);
7045 }
7046 #endif
7047
7048 #ifdef HAVE_prologue
7049 if (prologue_end)
7050 {
7051 rtx insn, prev;
7052
7053 /* GDB handles `break f' by setting a breakpoint on the first
7054 line note after the prologue. Which means (1) that if
7055 there are line number notes before where we inserted the
7056 prologue we should move them, and (2) we should generate a
7057 note before the end of the first basic block, if there isn't
7058 one already there. */
7059
7060 for (insn = prologue_end; insn ; insn = prev)
7061 {
7062 prev = PREV_INSN (insn);
7063 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7064 {
7065 /* Note that we cannot reorder the first insn in the
7066 chain, since rest_of_compilation relies on that
7067 remaining constant. */
7068 if (prev == NULL)
7069 break;
7070 reorder_insns (insn, insn, prologue_end);
7071 }
7072 }
7073
7074 /* Find the last line number note in the first block. */
7075 for (insn = BASIC_BLOCK (0)->end;
7076 insn != prologue_end;
7077 insn = PREV_INSN (insn))
7078 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7079 break;
7080
7081 /* If we didn't find one, make a copy of the first line number
7082 we run across. */
7083 if (! insn)
7084 {
7085 for (insn = next_active_insn (prologue_end);
7086 insn;
7087 insn = PREV_INSN (insn))
7088 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7089 {
7090 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7091 NOTE_LINE_NUMBER (insn),
7092 prologue_end);
7093 break;
7094 }
7095 }
7096 }
7097 #endif
7098 #ifdef HAVE_epilogue
7099 if (epilogue_end)
7100 {
7101 rtx insn, next;
7102
7103 /* Similarly, move any line notes that appear after the epilogue.
7104 	 There is no need, however, to be quite so anal about the existence
7105 of such a note. */
7106 for (insn = epilogue_end; insn ; insn = next)
7107 {
7108 next = NEXT_INSN (insn);
7109 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7110 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7111 }
7112 }
7113 #endif
7114 }
7115
7116 /* Reposition the prologue-end and epilogue-begin notes after instruction
7117 scheduling and delayed branch scheduling. */
7118
7119 void
7120 reposition_prologue_and_epilogue_notes (f)
7121 rtx f ATTRIBUTE_UNUSED;
7122 {
7123 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7124 int len;
7125
7126 if ((len = VARRAY_SIZE (prologue)) > 0)
7127 {
7128 register rtx insn, note = 0;
7129
7130 /* Scan from the beginning until we reach the last prologue insn.
7131 We apparently can't depend on basic_block_{head,end} after
7132 reorg has run. */
7133 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7134 {
7135 if (GET_CODE (insn) == NOTE)
7136 {
7137 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7138 note = insn;
7139 }
7140 else if ((len -= contains (insn, prologue)) == 0)
7141 {
7142 rtx next;
7143 /* Find the prologue-end note if we haven't already, and
7144 move it to just after the last prologue insn. */
7145 if (note == 0)
7146 {
7147 for (note = insn; (note = NEXT_INSN (note));)
7148 if (GET_CODE (note) == NOTE
7149 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7150 break;
7151 }
7152
7153 next = NEXT_INSN (note);
7154
7155 /* Whether or not we can depend on BLOCK_HEAD,
7156 attempt to keep it up-to-date. */
7157 if (BLOCK_HEAD (0) == note)
7158 BLOCK_HEAD (0) = next;
7159
7160 remove_insn (note);
7161 add_insn_after (note, insn);
7162 }
7163 }
7164 }
7165
7166 if ((len = VARRAY_SIZE (epilogue)) > 0)
7167 {
7168 register rtx insn, note = 0;
7169
7170 /* Scan from the end until we reach the first epilogue insn.
7171 We apparently can't depend on basic_block_{head,end} after
7172 reorg has run. */
7173 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7174 {
7175 if (GET_CODE (insn) == NOTE)
7176 {
7177 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7178 note = insn;
7179 }
7180 else if ((len -= contains (insn, epilogue)) == 0)
7181 {
7182 /* Find the epilogue-begin note if we haven't already, and
7183 move it to just before the first epilogue insn. */
7184 if (note == 0)
7185 {
7186 for (note = insn; (note = PREV_INSN (note));)
7187 if (GET_CODE (note) == NOTE
7188 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7189 break;
7190 }
7191
7192 /* Whether or not we can depend on BLOCK_HEAD,
7193 attempt to keep it up-to-date. */
7194 if (n_basic_blocks
7195 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7196 BLOCK_HEAD (n_basic_blocks-1) = note;
7197
7198 remove_insn (note);
7199 add_insn_before (note, insn);
7200 }
7201 }
7202 }
7203 #endif /* HAVE_prologue or HAVE_epilogue */
7204 }
7205
7206 /* Mark T for GC. */
7207
7208 static void
7209 mark_temp_slot (t)
7210 struct temp_slot *t;
7211 {
7212 while (t)
7213 {
7214 ggc_mark_rtx (t->slot);
7215 ggc_mark_rtx (t->address);
7216 ggc_mark_tree (t->rtl_expr);
7217
7218 t = t->next;
7219 }
7220 }
7221
7222 /* Mark P for GC. */
7223
7224 static void
7225 mark_function_status (p)
7226 struct function *p;
7227 {
7228 int i;
7229 rtx *r;
7230
7231 if (p == 0)
7232 return;
7233
7234 ggc_mark_rtx (p->arg_offset_rtx);
7235
7236 if (p->x_parm_reg_stack_loc)
7237 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7238 i > 0; --i, ++r)
7239 ggc_mark_rtx (*r);
7240
7241 ggc_mark_rtx (p->return_rtx);
7242 ggc_mark_rtx (p->x_cleanup_label);
7243 ggc_mark_rtx (p->x_return_label);
7244 ggc_mark_rtx (p->x_save_expr_regs);
7245 ggc_mark_rtx (p->x_stack_slot_list);
7246 ggc_mark_rtx (p->x_parm_birth_insn);
7247 ggc_mark_rtx (p->x_tail_recursion_label);
7248 ggc_mark_rtx (p->x_tail_recursion_reentry);
7249 ggc_mark_rtx (p->internal_arg_pointer);
7250 ggc_mark_rtx (p->x_arg_pointer_save_area);
7251 ggc_mark_tree (p->x_rtl_expr_chain);
7252 ggc_mark_rtx (p->x_last_parm_insn);
7253 ggc_mark_tree (p->x_context_display);
7254 ggc_mark_tree (p->x_trampoline_list);
7255 ggc_mark_rtx (p->epilogue_delay_list);
7256
7257 mark_temp_slot (p->x_temp_slots);
7258
7259 {
7260 struct var_refs_queue *q = p->fixup_var_refs_queue;
7261 while (q)
7262 {
7263 ggc_mark_rtx (q->modified);
7264 q = q->next;
7265 }
7266 }
7267
7268 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7269 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7270 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7271 ggc_mark_tree (p->x_nonlocal_labels);
7272 }
7273
7274 /* Mark the function chain ARG (which is really a struct function **)
7275 for GC. */
7276
7277 static void
7278 mark_function_chain (arg)
7279 void *arg;
7280 {
7281 struct function *f = *(struct function **) arg;
7282
7283 for (; f; f = f->next_global)
7284 {
7285 ggc_mark_tree (f->decl);
7286
7287 mark_function_status (f);
7288 mark_eh_status (f->eh);
7289 mark_stmt_status (f->stmt);
7290 mark_expr_status (f->expr);
7291 mark_emit_status (f->emit);
7292 mark_varasm_status (f->varasm);
7293
7294 if (mark_machine_status)
7295 (*mark_machine_status) (f);
7296 if (mark_lang_status)
7297 (*mark_lang_status) (f);
7298
7299 if (f->original_arg_vector)
7300 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7301 if (f->original_decl_initial)
7302 ggc_mark_tree (f->original_decl_initial);
7303 }
7304 }
7305
7306 /* Called once, at initialization, to initialize function.c. */
7307
7308 void
7309 init_function_once ()
7310 {
7311 ggc_add_root (&all_functions, 1, sizeof all_functions,
7312 mark_function_chain);
7313
7314 VARRAY_INT_INIT (prologue, 0, "prologue");
7315 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7316 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7317 }