1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62
63 #ifndef ACCUMULATE_OUTGOING_ARGS
64 #define ACCUMULATE_OUTGOING_ARGS 0
65 #endif
66
67 #ifndef TRAMPOLINE_ALIGNMENT
68 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
69 #endif
70
71 #ifndef LOCAL_ALIGNMENT
72 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
73 #endif
74
75 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
76 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
77 #endif
78
79 /* Some systems use __main in a way incompatible with its use in gcc; in these
80 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
81 give the same symbol without quotes for an alternative entry point. You
82 must define both, or neither. */
83 #ifndef NAME__MAIN
84 #define NAME__MAIN "__main"
85 #define SYMBOL__MAIN __main
86 #endif
87
88 /* Round a value down to the largest multiple of the required alignment
89 that does not exceed it. Avoid using division in case the value is
90 negative. Assume the alignment is a power of two. */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
92
93 /* Similar, but round to the next highest integer that meets the
94 alignment. */
95 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
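/* For illustration (a sketch, assuming two's complement arithmetic and a
   power-of-two ALIGN, which is what these macros require):

     FLOOR_ROUND (13, 8)  ==  8     CEIL_ROUND (13, 8)  ==  16
     FLOOR_ROUND (16, 8)  == 16     CEIL_ROUND (16, 8)  ==  16
     FLOOR_ROUND (-20, 8) == -24    CEIL_ROUND (-20, 8) == -16

   The masking form is used instead of division because division rounds
   toward zero for negative operands, which is not what frame offsets
   need.  */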
96
97 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
98 during rtl generation. If they are different register numbers, this is
99 always true. It may also be true if
100 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
101 generation. See fix_lexical_addr for details. */
102
103 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
104 #define NEED_SEPARATE_AP
105 #endif
106
107 /* Nonzero if function being compiled doesn't contain any calls
108 (ignoring the prologue and epilogue). This is set prior to
109 local register allocation and is valid for the remaining
110 compiler passes. */
111 int current_function_is_leaf;
112
113 /* Nonzero if function being compiled doesn't contain any instructions
114 that can throw an exception. This is set prior to final. */
115
116 int current_function_nothrow;
117
118 /* Nonzero if function being compiled doesn't modify the stack pointer
119 (ignoring the prologue and epilogue). This is only valid after
120 life_analysis has run. */
121 int current_function_sp_is_unchanging;
122
123 /* Nonzero if the function being compiled is a leaf function which only
124 uses leaf registers. This is valid after reload (specifically after
125 sched2) and is useful only if the port defines LEAF_REGISTERS. */
126 int current_function_uses_only_leaf_regs;
127
128 /* Nonzero once virtual register instantiation has been done.
129 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
130 static int virtuals_instantiated;
131
132 /* These variables hold pointers to functions to
133 save and restore machine-specific data,
134 in push_function_context and pop_function_context. */
135 void (*init_machine_status) PARAMS ((struct function *));
136 void (*save_machine_status) PARAMS ((struct function *));
137 void (*restore_machine_status) PARAMS ((struct function *));
138 void (*mark_machine_status) PARAMS ((struct function *));
139 void (*free_machine_status) PARAMS ((struct function *));
140
141 /* Likewise, but for language-specific data. */
142 void (*init_lang_status) PARAMS ((struct function *));
143 void (*save_lang_status) PARAMS ((struct function *));
144 void (*restore_lang_status) PARAMS ((struct function *));
145 void (*mark_lang_status) PARAMS ((struct function *));
146 void (*free_lang_status) PARAMS ((struct function *));
147
148 /* The FUNCTION_DECL for an inline function currently being expanded. */
149 tree inline_function_decl;
150
151 /* The currently compiled function. */
152 struct function *cfun = 0;
153
154 /* Global list of all compiled functions. */
155 struct function *all_functions = 0;
156
157 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
158 static varray_type prologue;
159 static varray_type epilogue;
160
161 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
162 in this function. */
163 static varray_type sibcall_epilogue;
164 \f
165 /* In order to evaluate some expressions, such as function calls returning
166 structures in memory, we need to temporarily allocate stack locations.
167 We record each allocated temporary in the following structure.
168
169 Associated with each temporary slot is a nesting level. When we pop up
170 one level, all temporaries associated with the previous level are freed.
171 Normally, all temporaries are freed after the execution of the statement
172 in which they were created. However, if we are inside a ({...}) grouping,
173 the result may be in a temporary and hence must be preserved. If the
174 result could be in a temporary, we preserve it if we can determine which
175 one it is in. If we cannot determine which temporary may contain the
176 result, all temporaries are preserved. A temporary is preserved by
177 pretending it was allocated at the previous nesting level.
178
179 Automatic variables are also assigned temporary slots, at the nesting
180 level where they are defined. They are marked as "kept" so that
181 free_temp_slots will not free them. */
182
183 struct temp_slot
184 {
185 /* Points to next temporary slot. */
186 struct temp_slot *next;
188 /* The rtx used to reference the slot. */
188 rtx slot;
189 /* The rtx used to represent the address if not the address of the
190 slot above. May be an EXPR_LIST if multiple addresses exist. */
191 rtx address;
192 /* The alignment (in bits) of the slot. */
193 int align;
194 /* The size, in units, of the slot. */
195 HOST_WIDE_INT size;
196 /* The alias set for the slot. If the alias set is zero, we don't
197 know anything about the alias set of the slot. We must only
198 reuse a slot if it is assigned an object of the same alias set.
199 Otherwise, the rest of the compiler may assume that the new use
200 of the slot cannot alias the old use of the slot, which is
201 false. If the slot has alias set zero, then we can't reuse the
202 slot at all, since we have no idea what alias set may have been
203 imposed on the memory. For example, if the stack slot is the
204 call frame for an inlined function, we have no idea what alias
205 sets will be assigned to various pieces of the call frame. */
206 HOST_WIDE_INT alias_set;
207 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
208 tree rtl_expr;
209 /* Non-zero if this temporary is currently in use. */
210 char in_use;
211 /* Non-zero if this temporary has its address taken. */
212 char addr_taken;
213 /* Nesting level at which this slot is being used. */
214 int level;
215 /* Non-zero if this should survive a call to free_temp_slots. */
216 int keep;
217 /* The offset of the slot from the frame_pointer, including extra space
218 for alignment. This info is for combine_temp_slots. */
219 HOST_WIDE_INT base_offset;
220 /* The size of the slot, including extra space for alignment. This
221 info is for combine_temp_slots. */
222 HOST_WIDE_INT full_size;
223 };
224 \f
225 /* This structure is used to record MEMs or pseudos used to replace VAR, any
226 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
227 maintain this list in case two operands of an insn were required to match;
228 in that case we must ensure we use the same replacement. */
229
230 struct fixup_replacement
231 {
232 rtx old;
233 rtx new;
234 struct fixup_replacement *next;
235 };
236
237 struct insns_for_mem_entry {
238 /* The KEY in HE will be a MEM. */
239 struct hash_entry he;
240 /* These are the INSNS which reference the MEM. */
241 rtx insns;
242 };
243
244 /* Forward declarations. */
245
246 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
247 int, struct function *));
248 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
249 HOST_WIDE_INT, int, tree));
250 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
251 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
252 enum machine_mode, enum machine_mode,
253 int, unsigned int, int,
254 struct hash_table *));
255 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
256 enum machine_mode,
257 struct hash_table *));
258 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
259 struct hash_table *));
260 static struct fixup_replacement
261 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
262 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
263 rtx, int, struct hash_table *));
264 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
265 struct fixup_replacement **));
266 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
267 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
268 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
269 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
270 static void instantiate_decls PARAMS ((tree, int));
271 static void instantiate_decls_1 PARAMS ((tree, int));
272 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
273 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
274 static void delete_handlers PARAMS ((void));
275 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
276 struct args_size *));
277 #ifndef ARGS_GROW_DOWNWARD
278 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
279 tree));
280 #endif
281 static rtx round_trampoline_addr PARAMS ((rtx));
282 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
283 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
284 static tree blocks_nreverse PARAMS ((tree));
285 static int all_blocks PARAMS ((tree, tree *));
286 static tree *get_block_vector PARAMS ((tree, int *));
287 /* We always define `record_insns' even if it's not used so that we
288 can always export `prologue_epilogue_contains'. */
289 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
290 static int contains PARAMS ((rtx, varray_type));
291 #ifdef HAVE_return
292 static void emit_return_into_block PARAMS ((basic_block, rtx));
293 #endif
294 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
295 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
296 struct hash_table *));
297 #ifdef HAVE_epilogue
298 static void keep_stack_depressed PARAMS ((rtx));
299 #endif
300 static int is_addressof PARAMS ((rtx *, void *));
301 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
302 struct hash_table *,
303 hash_table_key));
304 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
305 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
306 static int insns_for_mem_walk PARAMS ((rtx *, void *));
307 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
308 static void mark_temp_slot PARAMS ((struct temp_slot *));
309 static void mark_function_status PARAMS ((struct function *));
310 static void mark_function_chain PARAMS ((void *));
311 static void prepare_function_start PARAMS ((void));
312 static void do_clobber_return_reg PARAMS ((rtx, void *));
313 static void do_use_return_reg PARAMS ((rtx, void *));
314 \f
315 /* Pointer to chain of `struct function' for containing functions. */
316 struct function *outer_function_chain;
317
318 /* Given a function decl for a containing function,
319 return the `struct function' for it. */
320
321 struct function *
322 find_function_data (decl)
323 tree decl;
324 {
325 struct function *p;
326
327 for (p = outer_function_chain; p; p = p->next)
328 if (p->decl == decl)
329 return p;
330
331 abort ();
332 }
333
334 /* Save the current context for compilation of a nested function.
335 This is called from language-specific code. The caller should use
336 the save_lang_status callback to save any language-specific state,
337 since this function knows only about language-independent
338 variables. */
339
340 void
341 push_function_context_to (context)
342 tree context;
343 {
344 struct function *p, *context_data;
345
346 if (context)
347 {
348 context_data = (context == current_function_decl
349 ? cfun
350 : find_function_data (context));
351 context_data->contains_functions = 1;
352 }
353
354 if (cfun == 0)
355 init_dummy_function_start ();
356 p = cfun;
357
358 p->next = outer_function_chain;
359 outer_function_chain = p;
360 p->fixup_var_refs_queue = 0;
361
362 save_tree_status (p);
363 if (save_lang_status)
364 (*save_lang_status) (p);
365 if (save_machine_status)
366 (*save_machine_status) (p);
367
368 cfun = 0;
369 }
370
371 void
372 push_function_context ()
373 {
374 push_function_context_to (current_function_decl);
375 }
376
377 /* Restore the last saved context, at the end of a nested function.
378 This function is called from language-specific code. */
379
380 void
381 pop_function_context_from (context)
382 tree context ATTRIBUTE_UNUSED;
383 {
384 struct function *p = outer_function_chain;
385 struct var_refs_queue *queue;
386 struct var_refs_queue *next;
387
388 cfun = p;
389 outer_function_chain = p->next;
390
391 current_function_decl = p->decl;
392 reg_renumber = 0;
393
394 restore_tree_status (p);
395 restore_emit_status (p);
396
397 if (restore_machine_status)
398 (*restore_machine_status) (p);
399 if (restore_lang_status)
400 (*restore_lang_status) (p);
401
402 /* Finish doing put_var_into_stack for any of our variables
403 which became addressable during the nested function. */
404 for (queue = p->fixup_var_refs_queue; queue; queue = next)
405 {
406 next = queue->next;
407 fixup_var_refs (queue->modified, queue->promoted_mode,
408 queue->unsignedp, 0);
409 free (queue);
410 }
411 p->fixup_var_refs_queue = 0;
412
413 /* Reset variables that have known state during rtx generation. */
414 rtx_equal_function_value_matters = 1;
415 virtuals_instantiated = 0;
416 }
417
418 void
419 pop_function_context ()
420 {
421 pop_function_context_from (current_function_decl);
422 }
423
424 /* Clear out all parts of the state in F that can safely be discarded
425 after the function has been parsed, but not compiled, to let
426 garbage collection reclaim the memory. */
427
428 void
429 free_after_parsing (f)
430 struct function *f;
431 {
432 /* f->expr->forced_labels is used by code generation. */
433 /* f->emit->regno_reg_rtx is used by code generation. */
434 /* f->varasm is used by code generation. */
435 /* f->eh->eh_return_stub_label is used by code generation. */
436
437 if (free_lang_status)
438 (*free_lang_status) (f);
439 free_stmt_status (f);
440 }
441
442 /* Clear out all parts of the state in F that can safely be discarded
443 after the function has been compiled, to let garbage collection
444 reclaim the memory. */
445
446 void
447 free_after_compilation (f)
448 struct function *f;
449 {
450 struct temp_slot *ts;
451 struct temp_slot *next;
452
453 free_eh_status (f);
454 free_expr_status (f);
455 free_emit_status (f);
456 free_varasm_status (f);
457
458 if (free_machine_status)
459 (*free_machine_status) (f);
460
461 if (f->x_parm_reg_stack_loc)
462 free (f->x_parm_reg_stack_loc);
463
464 for (ts = f->x_temp_slots; ts; ts = next)
465 {
466 next = ts->next;
467 free (ts);
468 }
469 f->x_temp_slots = NULL;
470
471 f->arg_offset_rtx = NULL;
472 f->return_rtx = NULL;
473 f->internal_arg_pointer = NULL;
474 f->x_nonlocal_labels = NULL;
475 f->x_nonlocal_goto_handler_slots = NULL;
476 f->x_nonlocal_goto_handler_labels = NULL;
477 f->x_nonlocal_goto_stack_level = NULL;
478 f->x_cleanup_label = NULL;
479 f->x_return_label = NULL;
480 f->x_save_expr_regs = NULL;
481 f->x_stack_slot_list = NULL;
482 f->x_rtl_expr_chain = NULL;
483 f->x_tail_recursion_label = NULL;
484 f->x_tail_recursion_reentry = NULL;
485 f->x_arg_pointer_save_area = NULL;
486 f->x_context_display = NULL;
487 f->x_trampoline_list = NULL;
488 f->x_parm_birth_insn = NULL;
489 f->x_last_parm_insn = NULL;
490 f->x_parm_reg_stack_loc = NULL;
491 f->fixup_var_refs_queue = NULL;
492 f->original_arg_vector = NULL;
493 f->original_decl_initial = NULL;
494 f->inl_last_parm_insn = NULL;
495 f->epilogue_delay_list = NULL;
496 }
497 \f
498 /* Allocate fixed slots in the stack frame of the current function. */
499
500 /* Return size needed for stack frame based on slots so far allocated in
501 function F.
502 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
503 the caller may have to do that. */
504
505 HOST_WIDE_INT
506 get_func_frame_size (f)
507 struct function *f;
508 {
509 #ifdef FRAME_GROWS_DOWNWARD
510 return -f->x_frame_offset;
511 #else
512 return f->x_frame_offset;
513 #endif
514 }
515
516 /* Return size needed for stack frame based on slots so far allocated.
517 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
518 the caller may have to do that. */
519 HOST_WIDE_INT
520 get_frame_size ()
521 {
522 return get_func_frame_size (cfun);
523 }
524
525 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
526 with machine mode MODE.
527
528 ALIGN controls the amount of alignment for the address of the slot:
529 0 means according to MODE,
530 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
531 positive specifies alignment boundary in bits.
532
533 We do not round to stack_boundary here.
534
535 FUNCTION specifies the function to allocate in. */
536
537 static rtx
538 assign_stack_local_1 (mode, size, align, function)
539 enum machine_mode mode;
540 HOST_WIDE_INT size;
541 int align;
542 struct function *function;
543 {
544 register rtx x, addr;
545 int bigend_correction = 0;
546 int alignment;
547
548 /* Allocate in the memory associated with the function in whose frame
549 we are assigning. */
550 if (function != cfun)
551 push_obstacks (function->function_obstack,
552 function->function_maybepermanent_obstack);
553
554 if (align == 0)
555 {
556 tree type;
557
558 if (mode == BLKmode)
559 alignment = BIGGEST_ALIGNMENT;
560 else
561 alignment = GET_MODE_ALIGNMENT (mode);
562
563 /* Allow the target to (possibly) increase the alignment of this
564 stack slot. */
565 type = type_for_mode (mode, 0);
566 if (type)
567 alignment = LOCAL_ALIGNMENT (type, alignment);
568
569 alignment /= BITS_PER_UNIT;
570 }
571 else if (align == -1)
572 {
573 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
574 size = CEIL_ROUND (size, alignment);
575 }
576 else
577 alignment = align / BITS_PER_UNIT;
578
579 #ifdef FRAME_GROWS_DOWNWARD
580 function->x_frame_offset -= size;
581 #endif
582
583 /* Ignore alignment requests greater than the preferred stack boundary; we cannot honor them. */
584 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
585 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
586
587 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
588 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
589
590 /* Round frame offset to that alignment.
591 We must be careful here, since FRAME_OFFSET might be negative and
592 division with a negative dividend isn't as well defined as we might
593 like. So we instead assume that ALIGNMENT is a power of two and
594 use logical operations which are unambiguous. */
595 #ifdef FRAME_GROWS_DOWNWARD
596 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
597 #else
598 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
599 #endif
600
601 /* On a big-endian machine, if we are allocating more space than we will use,
602 use the least significant bytes of those that are allocated. */
603 if (BYTES_BIG_ENDIAN && mode != BLKmode)
604 bigend_correction = size - GET_MODE_SIZE (mode);
605
606 /* If we have already instantiated virtual registers, return the actual
607 address relative to the frame pointer. */
608 if (function == cfun && virtuals_instantiated)
609 addr = plus_constant (frame_pointer_rtx,
610 (frame_offset + bigend_correction
611 + STARTING_FRAME_OFFSET));
612 else
613 addr = plus_constant (virtual_stack_vars_rtx,
614 function->x_frame_offset + bigend_correction);
615
616 #ifndef FRAME_GROWS_DOWNWARD
617 function->x_frame_offset += size;
618 #endif
619
620 x = gen_rtx_MEM (mode, addr);
621
622 function->x_stack_slot_list
623 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
624
625 if (function != cfun)
626 pop_obstacks ();
627
628 return x;
629 }
630
631 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
632 current function. */
633
634 rtx
635 assign_stack_local (mode, size, align)
636 enum machine_mode mode;
637 HOST_WIDE_INT size;
638 int align;
639 {
640 return assign_stack_local_1 (mode, size, align, cfun);
641 }
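/* As a rough usage sketch (the real call sites elsewhere in the compiler
   are authoritative): a pass that needs a word-sized spill slot in the
   current frame might do

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and receive a MEM whose address is a constant offset from
   virtual_stack_vars_rtx, or from the frame pointer once virtual
   registers have been instantiated.  */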
642 \f
643 /* Allocate a temporary stack slot and record it for possible later
644 reuse.
645
646 MODE is the machine mode to be given to the returned rtx.
647
648 SIZE is the size in units of the space required. We do no rounding here
649 since assign_stack_local will do any required rounding.
650
651 KEEP is 1 if this slot is to be retained after a call to
652 free_temp_slots. Automatic variables for a block are allocated
653 with this flag. KEEP is 2 if we allocate a longer term temporary,
654 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
655 if we are to allocate something at an inner level to be treated as
656 a variable in the block (e.g., a SAVE_EXPR).
657
658 TYPE is the type that will be used for the stack slot. */
659
660 static rtx
661 assign_stack_temp_for_type (mode, size, keep, type)
662 enum machine_mode mode;
663 HOST_WIDE_INT size;
664 int keep;
665 tree type;
666 {
667 int align;
668 HOST_WIDE_INT alias_set;
669 struct temp_slot *p, *best_p = 0;
670
671 /* If SIZE is -1 it means that somebody tried to allocate a temporary
672 of a variable size. */
673 if (size == -1)
674 abort ();
675
676 /* If we know the alias set for the memory that will be used, use
677 it. If there's no TYPE, then we don't know anything about the
678 alias set for the memory. */
679 if (type)
680 alias_set = get_alias_set (type);
681 else
682 alias_set = 0;
683
684 if (mode == BLKmode)
685 align = BIGGEST_ALIGNMENT;
686 else
687 align = GET_MODE_ALIGNMENT (mode);
688
689 if (! type)
690 type = type_for_mode (mode, 0);
691
692 if (type)
693 align = LOCAL_ALIGNMENT (type, align);
694
695 /* Try to find an available, already-allocated temporary of the proper
696 mode which meets the size and alignment requirements. Choose the
697 smallest one with the closest alignment. */
698 for (p = temp_slots; p; p = p->next)
699 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
700 && ! p->in_use
701 && (! flag_strict_aliasing
702 || (alias_set && p->alias_set == alias_set))
703 && (best_p == 0 || best_p->size > p->size
704 || (best_p->size == p->size && best_p->align > p->align)))
705 {
706 if (p->align == align && p->size == size)
707 {
708 best_p = 0;
709 break;
710 }
711 best_p = p;
712 }
713
714 /* Make our best, if any, the one to use. */
715 if (best_p)
716 {
717 /* If there are enough aligned bytes left over, make them into a new
718 temp_slot so that the extra bytes don't get wasted. Do this only
719 for BLKmode slots, so that we can be sure of the alignment. */
720 if (GET_MODE (best_p->slot) == BLKmode)
721 {
722 int alignment = best_p->align / BITS_PER_UNIT;
723 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
724
725 if (best_p->size - rounded_size >= alignment)
726 {
727 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
728 p->in_use = p->addr_taken = 0;
729 p->size = best_p->size - rounded_size;
730 p->base_offset = best_p->base_offset + rounded_size;
731 p->full_size = best_p->full_size - rounded_size;
732 p->slot = gen_rtx_MEM (BLKmode,
733 plus_constant (XEXP (best_p->slot, 0),
734 rounded_size));
735 p->align = best_p->align;
736 p->address = 0;
737 p->rtl_expr = 0;
738 p->alias_set = best_p->alias_set;
739 p->next = temp_slots;
740 temp_slots = p;
741
742 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
743 stack_slot_list);
744
745 best_p->size = rounded_size;
746 best_p->full_size = rounded_size;
747 }
748 }
749
750 p = best_p;
751 }
752
753 /* If we still didn't find one, make a new temporary. */
754 if (p == 0)
755 {
756 HOST_WIDE_INT frame_offset_old = frame_offset;
757
758 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
759
760 /* We are passing an explicit alignment request to assign_stack_local.
761 One side effect is that assign_stack_local will not round SIZE
762 to ensure the frame offset remains suitably aligned.
763
764 So for requests which depended on the rounding of SIZE, we go ahead
765 and round it now. We also make sure ALIGNMENT is at least
766 BIGGEST_ALIGNMENT. */
767 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
768 abort ();
769 p->slot = assign_stack_local (mode,
770 (mode == BLKmode
771 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
772 : size),
773 align);
774
775 p->align = align;
776 p->alias_set = alias_set;
777
778 /* The following slot size computation is necessary because we don't
779 know the actual size of the temporary slot until assign_stack_local
780 has performed all the frame alignment and size rounding for the
781 requested temporary. Note that extra space added for alignment
782 can be either above or below this stack slot depending on which
783 way the frame grows. We include the extra space if and only if it
784 is above this slot. */
785 #ifdef FRAME_GROWS_DOWNWARD
786 p->size = frame_offset_old - frame_offset;
787 #else
788 p->size = size;
789 #endif
790
791 /* Now define the fields used by combine_temp_slots. */
792 #ifdef FRAME_GROWS_DOWNWARD
793 p->base_offset = frame_offset;
794 p->full_size = frame_offset_old - frame_offset;
795 #else
796 p->base_offset = frame_offset_old;
797 p->full_size = frame_offset - frame_offset_old;
798 #endif
799 p->address = 0;
800 p->next = temp_slots;
801 temp_slots = p;
802 }
803
804 p->in_use = 1;
805 p->addr_taken = 0;
806 p->rtl_expr = seq_rtl_expr;
807
808 if (keep == 2)
809 {
810 p->level = target_temp_slot_level;
811 p->keep = 0;
812 }
813 else if (keep == 3)
814 {
815 p->level = var_temp_slot_level;
816 p->keep = 0;
817 }
818 else
819 {
820 p->level = temp_slot_level;
821 p->keep = keep;
822 }
823
824 /* We may be reusing an old slot, so clear any MEM flags that may have been
825 set from before. */
826 RTX_UNCHANGING_P (p->slot) = 0;
827 MEM_IN_STRUCT_P (p->slot) = 0;
828 MEM_SCALAR_P (p->slot) = 0;
829 MEM_ALIAS_SET (p->slot) = alias_set;
830
831 if (type != 0)
832 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
833
834 return p->slot;
835 }
836
837 /* Allocate a temporary stack slot and record it for possible later
838 reuse. First three arguments are same as in preceding function. */
839
840 rtx
841 assign_stack_temp (mode, size, keep)
842 enum machine_mode mode;
843 HOST_WIDE_INT size;
844 int keep;
845 {
846 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
847 }
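/* A hedged usage sketch: expanding a statement that needs scratch memory
   for a DImode value might call

     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   With KEEP == 0 the slot becomes reusable at the next free_temp_slots
   call at the end of the statement; KEEP == 1 is used for block-scope
   automatics, and KEEP == 2 and 3 select target_temp_slot_level and
   var_temp_slot_level respectively, as described above.  */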
848 \f
849 /* Assign a temporary of given TYPE.
850 KEEP is as for assign_stack_temp.
851 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
852 it is 0 if a register is OK.
853 DONT_PROMOTE is 1 if we should not promote values in register
854 to wider modes. */
855
856 rtx
857 assign_temp (type, keep, memory_required, dont_promote)
858 tree type;
859 int keep;
860 int memory_required;
861 int dont_promote ATTRIBUTE_UNUSED;
862 {
863 enum machine_mode mode = TYPE_MODE (type);
864 #ifndef PROMOTE_FOR_CALL_ONLY
865 int unsignedp = TREE_UNSIGNED (type);
866 #endif
867
868 if (mode == BLKmode || memory_required)
869 {
870 HOST_WIDE_INT size = int_size_in_bytes (type);
871 rtx tmp;
872
873 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
874 problems with allocating the stack space. */
875 if (size == 0)
876 size = 1;
877
878 /* Unfortunately, we don't yet know how to allocate variable-sized
879 temporaries. However, sometimes we have a fixed upper limit on
880 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
881 instead. This is the case for Chill variable-sized strings. */
882 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
883 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
884 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
885 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
886
887 tmp = assign_stack_temp_for_type (mode, size, keep, type);
888 return tmp;
889 }
890
891 #ifndef PROMOTE_FOR_CALL_ONLY
892 if (! dont_promote)
893 mode = promote_mode (type, mode, &unsignedp, 0);
894 #endif
895
896 return gen_reg_rtx (mode);
897 }
898 \f
899 /* Combine temporary stack slots which are adjacent on the stack.
900
901 This allows for better use of already allocated stack space. This is only
902 done for BLKmode slots because we can be sure that we won't have alignment
903 problems in this case. */
904
905 void
906 combine_temp_slots ()
907 {
908 struct temp_slot *p, *q;
909 struct temp_slot *prev_p, *prev_q;
910 int num_slots;
911
912 /* We can't combine slots, because the information about which slot
913 is in which alias set will be lost. */
914 if (flag_strict_aliasing)
915 return;
916
917 /* If there are a lot of temp slots, don't do anything unless
918 high levels of optimization are enabled. */
919 if (! flag_expensive_optimizations)
920 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
921 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
922 return;
923
924 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
925 {
926 int delete_p = 0;
927
928 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
929 for (q = p->next, prev_q = p; q; q = prev_q->next)
930 {
931 int delete_q = 0;
932 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
933 {
934 if (p->base_offset + p->full_size == q->base_offset)
935 {
936 /* Q comes after P; combine Q into P. */
937 p->size += q->size;
938 p->full_size += q->full_size;
939 delete_q = 1;
940 }
941 else if (q->base_offset + q->full_size == p->base_offset)
942 {
943 /* P comes after Q; combine P into Q. */
944 q->size += p->size;
945 q->full_size += p->full_size;
946 delete_p = 1;
947 break;
948 }
949 }
950 /* Either delete Q or advance past it. */
951 if (delete_q)
952 {
953 prev_q->next = q->next;
954 free (q);
955 }
956 else
957 prev_q = q;
958 }
959 /* Either delete P or advance past it. */
960 if (delete_p)
961 {
962 if (prev_p)
963 prev_p->next = p->next;
964 else
965 temp_slots = p->next;
966 }
967 else
968 prev_p = p;
969 }
970 }
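/* For illustration with made-up offsets: if one free BLKmode slot has
   base_offset 32 and full_size 16, and another has base_offset 48 and
   full_size 8, then p->base_offset + p->full_size == q->base_offset, so
   the two are merged into a single slot with full_size 24, which a later
   assign_stack_temp call of up to that size may then reuse.  */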
971 \f
972 /* Find the temp slot corresponding to the object at address X. */
973
974 static struct temp_slot *
975 find_temp_slot_from_address (x)
976 rtx x;
977 {
978 struct temp_slot *p;
979 rtx next;
980
981 for (p = temp_slots; p; p = p->next)
982 {
983 if (! p->in_use)
984 continue;
985
986 else if (XEXP (p->slot, 0) == x
987 || p->address == x
988 || (GET_CODE (x) == PLUS
989 && XEXP (x, 0) == virtual_stack_vars_rtx
990 && GET_CODE (XEXP (x, 1)) == CONST_INT
991 && INTVAL (XEXP (x, 1)) >= p->base_offset
992 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
993 return p;
994
995 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
996 for (next = p->address; next; next = XEXP (next, 1))
997 if (XEXP (next, 0) == x)
998 return p;
999 }
1000
1001 /* If we have a sum involving a register, see if it points to a temp
1002 slot. */
1003 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
1004 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
1005 return p;
1006 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
1007 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1008 return p;
1009
1010 return 0;
1011 }
1012
1013 /* Indicate that NEW is an alternate way of referring to the temp slot
1014 that previously was known by OLD. */
1015
1016 void
1017 update_temp_slot_address (old, new)
1018 rtx old, new;
1019 {
1020 struct temp_slot *p;
1021
1022 if (rtx_equal_p (old, new))
1023 return;
1024
1025 p = find_temp_slot_from_address (old);
1026
1027 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
1028 is a register, see if one operand of the PLUS is a temporary
1029 location; if so, NEW points into it. Otherwise, if both OLD and
1030 NEW are PLUS expressions with an operand in common, try a
1031 recursive call on the remaining operands. */
1032 if (p == 0)
1033 {
1034 if (GET_CODE (old) != PLUS)
1035 return;
1036
1037 if (GET_CODE (new) == REG)
1038 {
1039 update_temp_slot_address (XEXP (old, 0), new);
1040 update_temp_slot_address (XEXP (old, 1), new);
1041 return;
1042 }
1043 else if (GET_CODE (new) != PLUS)
1044 return;
1045
1046 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1047 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1048 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1049 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1050 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1051 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1052 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1053 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1054
1055 return;
1056 }
1057
1058 /* Otherwise add an alias for the temp's address. */
1059 else if (p->address == 0)
1060 p->address = new;
1061 else
1062 {
1063 if (GET_CODE (p->address) != EXPR_LIST)
1064 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1065
1066 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1067 }
1068 }
1069
1070 /* If X could be a reference to a temporary slot, mark the fact that its
1071 address was taken. */
1072
1073 void
1074 mark_temp_addr_taken (x)
1075 rtx x;
1076 {
1077 struct temp_slot *p;
1078
1079 if (x == 0)
1080 return;
1081
1082 /* If X is not in memory or is at a constant address, it cannot be in
1083 a temporary slot. */
1084 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1085 return;
1086
1087 p = find_temp_slot_from_address (XEXP (x, 0));
1088 if (p != 0)
1089 p->addr_taken = 1;
1090 }
1091
1092 /* If X could be a reference to a temporary slot, mark that slot as
1093 belonging to one level higher than the current level. If X
1094 matched one of our slots, just mark that one. Otherwise, we can't
1095 easily predict which it is, so upgrade all of them. Kept slots
1096 need not be touched.
1097
1098 This is called when an ({...}) construct occurs and a statement
1099 returns a value in memory. */
1100
1101 void
1102 preserve_temp_slots (x)
1103 rtx x;
1104 {
1105 struct temp_slot *p = 0;
1106
1107 /* If there is no result, we still might have some objects whose addresses
1108 were taken, so we need to make sure they stay around. */
1109 if (x == 0)
1110 {
1111 for (p = temp_slots; p; p = p->next)
1112 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1113 p->level--;
1114
1115 return;
1116 }
1117
1118 /* If X is a register that is being used as a pointer, see if we have
1119 a temporary slot we know it points to. To be consistent with
1120 the code below, we really should preserve all non-kept slots
1121 if we can't find a match, but that seems to be much too costly. */
1122 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1123 p = find_temp_slot_from_address (x);
1124
1125 /* If X is not in memory or is at a constant address, it cannot be in
1126 a temporary slot, but it can contain something whose address was
1127 taken. */
1128 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1129 {
1130 for (p = temp_slots; p; p = p->next)
1131 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1132 p->level--;
1133
1134 return;
1135 }
1136
1137 /* First see if we can find a match. */
1138 if (p == 0)
1139 p = find_temp_slot_from_address (XEXP (x, 0));
1140
1141 if (p != 0)
1142 {
1143 /* Move everything at our level whose address was taken to our new
1144 level in case we used its address. */
1145 struct temp_slot *q;
1146
1147 if (p->level == temp_slot_level)
1148 {
1149 for (q = temp_slots; q; q = q->next)
1150 if (q != p && q->addr_taken && q->level == p->level)
1151 q->level--;
1152
1153 p->level--;
1154 p->addr_taken = 0;
1155 }
1156 return;
1157 }
1158
1159 /* Otherwise, preserve all non-kept slots at this level. */
1160 for (p = temp_slots; p; p = p->next)
1161 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1162 p->level--;
1163 }
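/* As a sketch of the intent: when a GNU statement expression such as
   ({ struct S s = f (); s; }) yields its value in a temporary slot, the
   expander calls preserve_temp_slots on that rtx so the slot is moved up
   to the enclosing level and survives the free_temp_slots call made at
   the end of the inner statement.  */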
1164
1165 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1166 with that RTL_EXPR, promote it into a temporary slot at the present
1167 level so it will not be freed when we free slots made in the
1168 RTL_EXPR. */
1169
1170 void
1171 preserve_rtl_expr_result (x)
1172 rtx x;
1173 {
1174 struct temp_slot *p;
1175
1176 /* If X is not in memory or is at a constant address, it cannot be in
1177 a temporary slot. */
1178 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1179 return;
1180
1181 /* If we can find a match, move it to our level unless it is already at
1182 an upper level. */
1183 p = find_temp_slot_from_address (XEXP (x, 0));
1184 if (p != 0)
1185 {
1186 p->level = MIN (p->level, temp_slot_level);
1187 p->rtl_expr = 0;
1188 }
1189
1190 return;
1191 }
1192
1193 /* Free all temporaries used so far. This is normally called at the end
1194 of generating code for a statement. Don't free any temporaries
1195 currently in use for an RTL_EXPR that hasn't yet been emitted.
1196 We could eventually do better than this since it can be reused while
1197 generating the same RTL_EXPR, but this is complex and probably not
1198 worthwhile. */
1199
1200 void
1201 free_temp_slots ()
1202 {
1203 struct temp_slot *p;
1204
1205 for (p = temp_slots; p; p = p->next)
1206 if (p->in_use && p->level == temp_slot_level && ! p->keep
1207 && p->rtl_expr == 0)
1208 p->in_use = 0;
1209
1210 combine_temp_slots ();
1211 }
1212
1213 /* Free all temporary slots used in T, an RTL_EXPR node. */
1214
1215 void
1216 free_temps_for_rtl_expr (t)
1217 tree t;
1218 {
1219 struct temp_slot *p;
1220
1221 for (p = temp_slots; p; p = p->next)
1222 if (p->rtl_expr == t)
1223 {
1224 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1225 needs to be preserved. This can happen if a temporary in
1226 the RTL_EXPR was addressed; preserve_temp_slots will move
1227 the temporary into a higher level. */
1228 if (temp_slot_level <= p->level)
1229 p->in_use = 0;
1230 else
1231 p->rtl_expr = NULL_TREE;
1232 }
1233
1234 combine_temp_slots ();
1235 }
1236
1237 /* Mark all temporaries ever allocated in this function as not suitable
1238 for reuse until the current level is exited. */
1239
1240 void
1241 mark_all_temps_used ()
1242 {
1243 struct temp_slot *p;
1244
1245 for (p = temp_slots; p; p = p->next)
1246 {
1247 p->in_use = p->keep = 1;
1248 p->level = MIN (p->level, temp_slot_level);
1249 }
1250 }
1251
1252 /* Push deeper into the nesting level for stack temporaries. */
1253
1254 void
1255 push_temp_slots ()
1256 {
1257 temp_slot_level++;
1258 }
1259
1260 /* Likewise, but save the new level as the place to allocate variables
1261 for blocks. */
1262
1263 #if 0
1264 void
1265 push_temp_slots_for_block ()
1266 {
1267 push_temp_slots ();
1268
1269 var_temp_slot_level = temp_slot_level;
1270 }
1271
1272 /* Likewise, but save the new level as the place to allocate temporaries
1273 for TARGET_EXPRs. */
1274
1275 void
1276 push_temp_slots_for_target ()
1277 {
1278 push_temp_slots ();
1279
1280 target_temp_slot_level = temp_slot_level;
1281 }
1282
1283 /* Set and get the value of target_temp_slot_level. The only
1284 permitted use of these functions is to save and restore this value. */
1285
1286 int
1287 get_target_temp_slot_level ()
1288 {
1289 return target_temp_slot_level;
1290 }
1291
1292 void
1293 set_target_temp_slot_level (level)
1294 int level;
1295 {
1296 target_temp_slot_level = level;
1297 }
1298 #endif
1299
1300 /* Pop a temporary nesting level. All slots in use in the current level
1301 are freed. */
1302
1303 void
1304 pop_temp_slots ()
1305 {
1306 struct temp_slot *p;
1307
1308 for (p = temp_slots; p; p = p->next)
1309 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1310 p->in_use = 0;
1311
1312 combine_temp_slots ();
1313
1314 temp_slot_level--;
1315 }
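/* A hedged sketch of how these calls typically pair up in the expanders
   (the real call sites live in expr.c and stmt.c):

     push_temp_slots ();
     ... expand an expression, perhaps via assign_stack_temp ...
     preserve_temp_slots (result);   <- if the result may sit in a temp
     pop_temp_slots ();              <- frees what remains at this level

   free_temp_slots is the per-statement variant: it releases unpreserved
   slots without changing temp_slot_level.  */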
1316
1317 /* Initialize temporary slots. */
1318
1319 void
1320 init_temp_slots ()
1321 {
1322 /* We have not allocated any temporaries yet. */
1323 temp_slots = 0;
1324 temp_slot_level = 0;
1325 var_temp_slot_level = 0;
1326 target_temp_slot_level = 0;
1327 }
1328 \f
1329 /* Retroactively move an auto variable from a register to a stack slot.
1330 This is done when an address-reference to the variable is seen. */
1331
1332 void
1333 put_var_into_stack (decl)
1334 tree decl;
1335 {
1336 register rtx reg;
1337 enum machine_mode promoted_mode, decl_mode;
1338 struct function *function = 0;
1339 tree context;
1340 int can_use_addressof;
1341 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1342 int usedp = (TREE_USED (decl)
1343 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1344
1345 context = decl_function_context (decl);
1346
1347 /* Get the current rtl used for this object and its original mode. */
1348 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1349
1350 /* No need to do anything if decl has no rtx yet
1351 since in that case caller is setting TREE_ADDRESSABLE
1352 and a stack slot will be assigned when the rtl is made. */
1353 if (reg == 0)
1354 return;
1355
1356 /* Get the declared mode for this object. */
1357 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1358 : DECL_MODE (decl));
1359 /* Get the mode it's actually stored in. */
1360 promoted_mode = GET_MODE (reg);
1361
1362 /* If this variable comes from an outer function,
1363 find that function's saved context. */
1364 if (context != current_function_decl && context != inline_function_decl)
1365 for (function = outer_function_chain; function; function = function->next)
1366 if (function->decl == context)
1367 break;
1368
1369 /* If this is a variable-size object with a pseudo to address it,
1370 put that pseudo into the stack, if the var is nonlocal. */
1371 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1372 && GET_CODE (reg) == MEM
1373 && GET_CODE (XEXP (reg, 0)) == REG
1374 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1375 {
1376 reg = XEXP (reg, 0);
1377 decl_mode = promoted_mode = GET_MODE (reg);
1378 }
1379
1380 can_use_addressof
1381 = (function == 0
1382 && optimize > 0
1383 /* FIXME make it work for promoted modes too */
1384 && decl_mode == promoted_mode
1385 #ifdef NON_SAVING_SETJMP
1386 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1387 #endif
1388 );
1389
1390 /* If we can't use ADDRESSOF, make sure we see through one we already
1391 generated. */
1392 if (! can_use_addressof && GET_CODE (reg) == MEM
1393 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1394 reg = XEXP (XEXP (reg, 0), 0);
1395
1396 /* Now we should have a value that resides in one or more pseudo regs. */
1397
1398 if (GET_CODE (reg) == REG)
1399 {
1400 /* If this variable lives in the current function and we don't need
1401 to put things in the stack for the sake of setjmp, try to keep it
1402 in a register until we know we actually need the address. */
1403 if (can_use_addressof)
1404 gen_mem_addressof (reg, decl);
1405 else
1406 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1407 decl_mode, volatilep, 0, usedp, 0);
1408 }
1409 else if (GET_CODE (reg) == CONCAT)
1410 {
1411 /* A CONCAT contains two pseudos; put them both in the stack.
1412 We do it so they end up consecutive.
1413 We fixup references to the parts only after we fixup references
1414 to the whole CONCAT, lest we do double fixups for the latter
1415 references. */
1416 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1417 tree part_type = type_for_mode (part_mode, 0);
1418 rtx lopart = XEXP (reg, 0);
1419 rtx hipart = XEXP (reg, 1);
1420 #ifdef FRAME_GROWS_DOWNWARD
1421 /* Since part 0 should have a lower address, do it second. */
1422 put_reg_into_stack (function, hipart, part_type, part_mode,
1423 part_mode, volatilep, 0, 0, 0);
1424 put_reg_into_stack (function, lopart, part_type, part_mode,
1425 part_mode, volatilep, 0, 0, 0);
1426 #else
1427 put_reg_into_stack (function, lopart, part_type, part_mode,
1428 part_mode, volatilep, 0, 0, 0);
1429 put_reg_into_stack (function, hipart, part_type, part_mode,
1430 part_mode, volatilep, 0, 0, 0);
1431 #endif
1432
1433 /* Change the CONCAT into a combined MEM for both parts. */
1434 PUT_CODE (reg, MEM);
1435 set_mem_attributes (reg, decl, 1);
1436
1437 /* The two parts are in memory order already.
1438 Use the lower part's address as ours. */
1439 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1440 /* Prevent sharing of rtl that might lose. */
1441 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1442 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1443 if (usedp)
1444 {
1445 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1446 promoted_mode, 0);
1447 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1448 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1449 }
1450 }
1451 else
1452 return;
1453
1454 if (current_function_check_memory_usage)
1455 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1456 XEXP (reg, 0), Pmode,
1457 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1458 TYPE_MODE (sizetype),
1459 GEN_INT (MEMORY_USE_RW),
1460 TYPE_MODE (integer_type_node));
1461 }
1462
1463 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1464 into the stack frame of FUNCTION (0 means the current function).
1465 DECL_MODE is the machine mode of the user-level data type.
1466 PROMOTED_MODE is the machine mode of the register.
1467 VOLATILE_P is nonzero if this is for a "volatile" decl.
1468 USED_P is nonzero if this reg might have already been used in an insn. */
1469
1470 static void
1471 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1472 original_regno, used_p, ht)
1473 struct function *function;
1474 rtx reg;
1475 tree type;
1476 enum machine_mode promoted_mode, decl_mode;
1477 int volatile_p;
1478 unsigned int original_regno;
1479 int used_p;
1480 struct hash_table *ht;
1481 {
1482 struct function *func = function ? function : cfun;
1483 rtx new = 0;
1484 unsigned int regno = original_regno;
1485
1486 if (regno == 0)
1487 regno = REGNO (reg);
1488
1489 if (regno < func->x_max_parm_reg)
1490 new = func->x_parm_reg_stack_loc[regno];
1491
1492 if (new == 0)
1493 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1494
1495 PUT_CODE (reg, MEM);
1496 PUT_MODE (reg, decl_mode);
1497 XEXP (reg, 0) = XEXP (new, 0);
1498 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1499 MEM_VOLATILE_P (reg) = volatile_p;
1500
1501 /* If this is a memory ref that contains aggregate components,
1502 mark it as such for cse and loop optimize. If we are reusing a
1503 previously generated stack slot, then we need to copy the bit in
1504 case it was set for other reasons. For instance, it is set for
1505 __builtin_va_alist. */
1506 if (type)
1507 {
1508 MEM_SET_IN_STRUCT_P (reg,
1509 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1510 MEM_ALIAS_SET (reg) = get_alias_set (type);
1511 }
1512 if (used_p)
1513 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1514 }
1515
1516 /* Make sure that all refs to the variable, previously made
1517 when it was a register, are fixed up to be valid again.
1518 See function above for meaning of arguments. */
1519 static void
1520 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1521 struct function *function;
1522 rtx reg;
1523 tree type;
1524 enum machine_mode promoted_mode;
1525 struct hash_table *ht;
1526 {
1527 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1528
1529 if (function != 0)
1530 {
1531 struct var_refs_queue *temp;
1532
1533 temp
1534 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1535 temp->modified = reg;
1536 temp->promoted_mode = promoted_mode;
1537 temp->unsignedp = unsigned_p;
1538 temp->next = function->fixup_var_refs_queue;
1539 function->fixup_var_refs_queue = temp;
1540 }
1541 else
1542 /* Variable is local; fix it up now. */
1543 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1544 }
1545 \f
1546 static void
1547 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1548 rtx var;
1549 enum machine_mode promoted_mode;
1550 int unsignedp;
1551 struct hash_table *ht;
1552 {
1553 tree pending;
1554 rtx first_insn = get_insns ();
1555 struct sequence_stack *stack = seq_stack;
1556 tree rtl_exps = rtl_expr_chain;
1557 rtx insn;
1558
1559 /* Must scan all insns for stack-refs that exceed the limit. */
1560 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1561 stack == 0, ht);
1562 /* If there's a hash table, it must record all uses of VAR. */
1563 if (ht)
1564 return;
1565
1566 /* Scan all pending sequences too. */
1567 for (; stack; stack = stack->next)
1568 {
1569 push_to_sequence (stack->first);
1570 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1571 stack->first, stack->next != 0, 0);
1572 /* Update remembered end of sequence
1573 in case we added an insn at the end. */
1574 stack->last = get_last_insn ();
1575 end_sequence ();
1576 }
1577
1578 /* Scan all waiting RTL_EXPRs too. */
1579 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1580 {
1581 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1582 if (seq != const0_rtx && seq != 0)
1583 {
1584 push_to_sequence (seq);
1585 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0, 0);
1586 end_sequence ();
1587 }
1588 }
1589
1590 /* Scan the catch clauses for exception handling too. */
1591 push_to_full_sequence (catch_clauses, catch_clauses_last);
1592 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0, 0);
1593 end_full_sequence (&catch_clauses, &catch_clauses_last);
1594
1595 /* Scan sequences saved in CALL_PLACEHOLDERS too. */
1596 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1597 {
1598 if (GET_CODE (insn) == CALL_INSN
1599 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1600 {
1601 int i;
1602
1603 /* Look at the Normal call, sibling call and tail recursion
1604 sequences attached to the CALL_PLACEHOLDER. */
1605 for (i = 0; i < 3; i++)
1606 {
1607 rtx seq = XEXP (PATTERN (insn), i);
1608 if (seq)
1609 {
1610 push_to_sequence (seq);
1611 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1612 seq, 0, 0);
1613 XEXP (PATTERN (insn), i) = get_insns ();
1614 end_sequence ();
1615 }
1616 }
1617 }
1618 }
1619 }
1620 \f
1621 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1622 some part of an insn. Return a struct fixup_replacement whose OLD
1623 value is equal to X. Allocate a new structure if no such entry exists. */
1624
1625 static struct fixup_replacement *
1626 find_fixup_replacement (replacements, x)
1627 struct fixup_replacement **replacements;
1628 rtx x;
1629 {
1630 struct fixup_replacement *p;
1631
1632 /* See if we have already replaced this. */
1633 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1634 ;
1635
1636 if (p == 0)
1637 {
1638 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1639 p->old = x;
1640 p->new = 0;
1641 p->next = *replacements;
1642 *replacements = p;
1643 }
1644
1645 return p;
1646 }
1647
1648 /* Scan the insn-chain starting with INSN for refs to VAR
1649 and fix them up. TOPLEVEL is nonzero if this chain is the
1650 main chain of insns for the current function. */
1651
1652 static void
1653 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1654 rtx var;
1655 enum machine_mode promoted_mode;
1656 int unsignedp;
1657 rtx insn;
1658 int toplevel;
1659 struct hash_table *ht;
1660 {
1661 rtx call_dest = 0;
1662 rtx insn_list = NULL_RTX;
1663
1664 /* If we already know which INSNs reference VAR there's no need
1665 to walk the entire instruction chain. */
1666 if (ht)
1667 {
1668 insn_list = ((struct insns_for_mem_entry *)
1669 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1670 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1671 insn_list = XEXP (insn_list, 1);
1672 }
1673
1674 while (insn)
1675 {
1676 rtx next = NEXT_INSN (insn);
1677 rtx set, prev, prev_set;
1678 rtx note;
1679
1680 if (INSN_P (insn))
1681 {
1682 /* Remember the notes in case we delete the insn. */
1683 note = REG_NOTES (insn);
1684
1685 /* If this is a CLOBBER of VAR, delete it.
1686
1687 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1688 and REG_RETVAL notes too. */
1689 if (GET_CODE (PATTERN (insn)) == CLOBBER
1690 && (XEXP (PATTERN (insn), 0) == var
1691 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1692 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1693 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1694 {
1695 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1696 /* The REG_LIBCALL note will go away since we are going to
1697 turn INSN into a NOTE, so just delete the
1698 corresponding REG_RETVAL note. */
1699 remove_note (XEXP (note, 0),
1700 find_reg_note (XEXP (note, 0), REG_RETVAL,
1701 NULL_RTX));
1702
1703 /* In unoptimized compilation, we shouldn't call delete_insn
1704 except in jump.c when issuing warnings. */
1705 PUT_CODE (insn, NOTE);
1706 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1707 NOTE_SOURCE_FILE (insn) = 0;
1708 }
1709
1710 /* The insn to load VAR from a home in the arglist
1711 is now a no-op. When we see it, just delete it.
1712 Similarly if this is storing VAR from a register from which
1713 it was loaded in the previous insn. This will occur
1714 when an ADDRESSOF was made for an arglist slot. */
1715 else if (toplevel
1716 && (set = single_set (insn)) != 0
1717 && SET_DEST (set) == var
1718 /* If this represents the result of an insn group,
1719 don't delete the insn. */
1720 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1721 && (rtx_equal_p (SET_SRC (set), var)
1722 || (GET_CODE (SET_SRC (set)) == REG
1723 && (prev = prev_nonnote_insn (insn)) != 0
1724 && (prev_set = single_set (prev)) != 0
1725 && SET_DEST (prev_set) == SET_SRC (set)
1726 && rtx_equal_p (SET_SRC (prev_set), var))))
1727 {
1728 /* In unoptimized compilation, we shouldn't call delete_insn
1729 except in jump.c when issuing warnings. */
1730 PUT_CODE (insn, NOTE);
1731 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1732 NOTE_SOURCE_FILE (insn) = 0;
1733 if (insn == last_parm_insn)
1734 last_parm_insn = PREV_INSN (next);
1735 }
1736 else
1737 {
1738 struct fixup_replacement *replacements = 0;
1739 rtx next_insn = NEXT_INSN (insn);
1740
1741 if (SMALL_REGISTER_CLASSES)
1742 {
1743 /* If the insn that copies the results of a CALL_INSN
1744 into a pseudo now references VAR, we have to use an
1745 intermediate pseudo since we want the life of the
1746 return value register to be only a single insn.
1747
1748 If we don't use an intermediate pseudo, such things as
1749 address computations to make the address of VAR valid
1750 (if it is not already) can be placed between the CALL_INSN and INSN.
1751
1752 To make sure this doesn't happen, we record the destination
1753 of the CALL_INSN and see if the next insn uses both that
1754 and VAR. */
1755
1756 if (call_dest != 0 && GET_CODE (insn) == INSN
1757 && reg_mentioned_p (var, PATTERN (insn))
1758 && reg_mentioned_p (call_dest, PATTERN (insn)))
1759 {
1760 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1761
1762 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1763
1764 PATTERN (insn) = replace_rtx (PATTERN (insn),
1765 call_dest, temp);
1766 }
1767
1768 if (GET_CODE (insn) == CALL_INSN
1769 && GET_CODE (PATTERN (insn)) == SET)
1770 call_dest = SET_DEST (PATTERN (insn));
1771 else if (GET_CODE (insn) == CALL_INSN
1772 && GET_CODE (PATTERN (insn)) == PARALLEL
1773 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1774 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1775 else
1776 call_dest = 0;
1777 }
1778
1779 /* See if we have to do anything to INSN now that VAR is in
1780 memory. If it needs to be loaded into a pseudo, use a single
1781 pseudo for the entire insn in case there is a MATCH_DUP
1782 between two operands. We pass a pointer to the head of
1783 a list of struct fixup_replacements. If fixup_var_refs_1
1784 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1785 it will record them in this list.
1786
1787 If it allocated a pseudo for any replacement, we copy into
1788 it here. */
1789
1790 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1791 &replacements);
1792
1793 /* If this is last_parm_insn, and any instructions were output
1794 after it to fix it up, then we must set last_parm_insn to
1795 the last such instruction emitted. */
1796 if (insn == last_parm_insn)
1797 last_parm_insn = PREV_INSN (next_insn);
1798
1799 while (replacements)
1800 {
1801 if (GET_CODE (replacements->new) == REG)
1802 {
1803 rtx insert_before;
1804 rtx seq;
1805
1806 /* OLD might be a (subreg (mem)). */
1807 if (GET_CODE (replacements->old) == SUBREG)
1808 replacements->old
1809 = fixup_memory_subreg (replacements->old, insn, 0);
1810 else
1811 replacements->old
1812 = fixup_stack_1 (replacements->old, insn);
1813
1814 insert_before = insn;
1815
1816 /* If we are changing the mode, do a conversion.
1817 This might be wasteful, but combine.c will
1818 eliminate much of the waste. */
1819
1820 if (GET_MODE (replacements->new)
1821 != GET_MODE (replacements->old))
1822 {
1823 start_sequence ();
1824 convert_move (replacements->new,
1825 replacements->old, unsignedp);
1826 seq = gen_sequence ();
1827 end_sequence ();
1828 }
1829 else
1830 seq = gen_move_insn (replacements->new,
1831 replacements->old);
1832
1833 emit_insn_before (seq, insert_before);
1834 }
1835
1836 replacements = replacements->next;
1837 }
1838 }
1839
1840 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1841 But don't touch other insns referred to by reg-notes;
1842 we will get them elsewhere. */
1843 while (note)
1844 {
1845 if (GET_CODE (note) != INSN_LIST)
1846 XEXP (note, 0)
1847 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1848 note = XEXP (note, 1);
1849 }
1850 }
1851
1852 if (!ht)
1853 insn = next;
1854 else if (insn_list)
1855 {
1856 insn = XEXP (insn_list, 0);
1857 insn_list = XEXP (insn_list, 1);
1858 }
1859 else
1860 insn = NULL_RTX;
1861 }
1862 }
1863 \f
1864 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1865 See if the rtx expression at *LOC in INSN needs to be changed.
1866
1867 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1868 contain a list of original rtx's and replacements. If we find that we need
1869 to modify this insn by replacing a memory reference with a pseudo or by
1870 making a new MEM to implement a SUBREG, we consult that list to see if
1871 we have already chosen a replacement. If none has already been allocated,
1872 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1873 or the SUBREG, as appropriate, to the pseudo. */
1874
1875 static void
1876 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1877 register rtx var;
1878 enum machine_mode promoted_mode;
1879 register rtx *loc;
1880 rtx insn;
1881 struct fixup_replacement **replacements;
1882 {
1883 register int i;
1884 register rtx x = *loc;
1885 RTX_CODE code = GET_CODE (x);
1886 register const char *fmt;
1887 register rtx tem, tem1;
1888 struct fixup_replacement *replacement;
1889
1890 switch (code)
1891 {
1892 case ADDRESSOF:
1893 if (XEXP (x, 0) == var)
1894 {
1895 /* Prevent sharing of rtl that might lose. */
1896 rtx sub = copy_rtx (XEXP (var, 0));
1897
1898 if (! validate_change (insn, loc, sub, 0))
1899 {
1900 rtx y = gen_reg_rtx (GET_MODE (sub));
1901 rtx seq, new_insn;
1902
1903 /* We should be able to replace with a register or all is lost.
1904 Note that we can't use validate_change to verify this, since
1905 we're not trying to replace all duplicates simultaneously. */
1906 if (! validate_replace_rtx (*loc, y, insn))
1907 abort ();
1908
1909 /* Careful! First try to recognize a direct move of the
1910 value, mimicking how things are done in gen_reload wrt
1911 PLUS. Consider what happens when insn is a conditional
1912 move instruction and addsi3 clobbers flags. */
1913
1914 start_sequence ();
1915 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1916 seq = gen_sequence ();
1917 end_sequence ();
1918
1919 if (recog_memoized (new_insn) < 0)
1920 {
1921 /* That failed. Fall back on force_operand and hope. */
1922
1923 start_sequence ();
1924 force_operand (sub, y);
1925 seq = gen_sequence ();
1926 end_sequence ();
1927 }
1928
1929 #ifdef HAVE_cc0
1930 /* Don't separate setter from user. */
1931 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1932 insn = PREV_INSN (insn);
1933 #endif
1934
1935 emit_insn_before (seq, insn);
1936 }
1937 }
1938 return;
1939
1940 case MEM:
1941 if (var == x)
1942 {
1943 /* If we already have a replacement, use it. Otherwise,
1944 try to fix up this address in case it is invalid. */
1945
1946 replacement = find_fixup_replacement (replacements, var);
1947 if (replacement->new)
1948 {
1949 *loc = replacement->new;
1950 return;
1951 }
1952
1953 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1954
1955 /* Unless we are forcing memory to register or we changed the mode,
1956 we can leave things the way they are if the insn is valid. */
1957
1958 INSN_CODE (insn) = -1;
1959 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1960 && recog_memoized (insn) >= 0)
1961 return;
1962
1963 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1964 return;
1965 }
1966
1967 /* If X contains VAR, we need to unshare it here so that we update
1968 each occurrence separately. But all identical MEMs in one insn
1969 must be replaced with the same rtx because of the possibility of
1970 MATCH_DUPs. */
1971
1972 if (reg_mentioned_p (var, x))
1973 {
1974 replacement = find_fixup_replacement (replacements, x);
1975 if (replacement->new == 0)
1976 replacement->new = copy_most_rtx (x, var);
1977
1978 *loc = x = replacement->new;
1979 }
1980 break;
1981
1982 case REG:
1983 case CC0:
1984 case PC:
1985 case CONST_INT:
1986 case CONST:
1987 case SYMBOL_REF:
1988 case LABEL_REF:
1989 case CONST_DOUBLE:
1990 return;
1991
1992 case SIGN_EXTRACT:
1993 case ZERO_EXTRACT:
1994 /* Note that in some cases those types of expressions are altered
1995 by optimize_bit_field, and do not survive to get here. */
1996 if (XEXP (x, 0) == var
1997 || (GET_CODE (XEXP (x, 0)) == SUBREG
1998 && SUBREG_REG (XEXP (x, 0)) == var))
1999 {
2000 /* Get TEM as a valid MEM in the mode presently in the insn.
2001
2002 We don't worry about the possibility of MATCH_DUP here; it
2003 is highly unlikely and would be tricky to handle. */
2004
2005 tem = XEXP (x, 0);
2006 if (GET_CODE (tem) == SUBREG)
2007 {
2008 if (GET_MODE_BITSIZE (GET_MODE (tem))
2009 > GET_MODE_BITSIZE (GET_MODE (var)))
2010 {
2011 replacement = find_fixup_replacement (replacements, var);
2012 if (replacement->new == 0)
2013 replacement->new = gen_reg_rtx (GET_MODE (var));
2014 SUBREG_REG (tem) = replacement->new;
2015 }
2016 else
2017 tem = fixup_memory_subreg (tem, insn, 0);
2018 }
2019 else
2020 tem = fixup_stack_1 (tem, insn);
2021
2022 /* Unless we want to load from memory, get TEM into the proper mode
2023 for an extract from memory. This can only be done if the
2024 extract is at a constant position and length. */
2025
2026 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2027 && GET_CODE (XEXP (x, 2)) == CONST_INT
2028 && ! mode_dependent_address_p (XEXP (tem, 0))
2029 && ! MEM_VOLATILE_P (tem))
2030 {
2031 enum machine_mode wanted_mode = VOIDmode;
2032 enum machine_mode is_mode = GET_MODE (tem);
2033 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2034
2035 #ifdef HAVE_extzv
2036 if (GET_CODE (x) == ZERO_EXTRACT)
2037 {
2038 wanted_mode
2039 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2040 if (wanted_mode == VOIDmode)
2041 wanted_mode = word_mode;
2042 }
2043 #endif
2044 #ifdef HAVE_extv
2045 if (GET_CODE (x) == SIGN_EXTRACT)
2046 {
2047 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2048 if (wanted_mode == VOIDmode)
2049 wanted_mode = word_mode;
2050 }
2051 #endif
2052 /* If we have a narrower mode, we can do something. */
2053 if (wanted_mode != VOIDmode
2054 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2055 {
2056 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2057 rtx old_pos = XEXP (x, 2);
2058 rtx newmem;
2059
2060 /* If the bytes and bits are counted differently, we
2061 must adjust the offset. */
2062 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2063 offset = (GET_MODE_SIZE (is_mode)
2064 - GET_MODE_SIZE (wanted_mode) - offset);
2065
2066 pos %= GET_MODE_BITSIZE (wanted_mode);
2067
2068 newmem = gen_rtx_MEM (wanted_mode,
2069 plus_constant (XEXP (tem, 0), offset));
2070 MEM_COPY_ATTRIBUTES (newmem, tem);
2071
2072 /* Make the change and see if the insn remains valid. */
2073 INSN_CODE (insn) = -1;
2074 XEXP (x, 0) = newmem;
2075 XEXP (x, 2) = GEN_INT (pos);
2076
2077 if (recog_memoized (insn) >= 0)
2078 return;
2079
2080 /* Otherwise, restore old position. XEXP (x, 0) will be
2081 restored later. */
2082 XEXP (x, 2) = old_pos;
2083 }
2084 }
2085
2086 /* If we get here, the bitfield extract insn can't accept a memory
2087 reference. Copy the input into a register. */
2088
2089 tem1 = gen_reg_rtx (GET_MODE (tem));
2090 emit_insn_before (gen_move_insn (tem1, tem), insn);
2091 XEXP (x, 0) = tem1;
2092 return;
2093 }
2094 break;
2095
2096 case SUBREG:
2097 if (SUBREG_REG (x) == var)
2098 {
2099 /* If this is a special SUBREG made because VAR was promoted
2100 from a wider mode, replace it with VAR and call ourself
2101 recursively, this time saying that the object previously
2102 had its current mode (by virtue of the SUBREG). */
2103
2104 if (SUBREG_PROMOTED_VAR_P (x))
2105 {
2106 *loc = var;
2107 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2108 return;
2109 }
2110
2111 /* If this SUBREG makes VAR wider, it has become a paradoxical
2112 SUBREG with VAR in memory, but these aren't allowed at this
2113 stage of the compilation. So load VAR into a pseudo and take
2114 a SUBREG of that pseudo. */
2115 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2116 {
2117 replacement = find_fixup_replacement (replacements, var);
2118 if (replacement->new == 0)
2119 replacement->new = gen_reg_rtx (GET_MODE (var));
2120 SUBREG_REG (x) = replacement->new;
2121 return;
2122 }
2123
2124 /* See if we have already found a replacement for this SUBREG.
2125 If so, use it. Otherwise, make a MEM and see if the insn
2126 is recognized. If not, or if we should force MEM into a register,
2127 make a pseudo for this SUBREG. */
2128 replacement = find_fixup_replacement (replacements, x);
2129 if (replacement->new)
2130 {
2131 *loc = replacement->new;
2132 return;
2133 }
2134
2135 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2136
2137 INSN_CODE (insn) = -1;
2138 if (! flag_force_mem && recog_memoized (insn) >= 0)
2139 return;
2140
2141 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2142 return;
2143 }
2144 break;
2145
2146 case SET:
2147 /* First do special simplification of bit-field references. */
2148 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2149 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2150 optimize_bit_field (x, insn, 0);
2151 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2152 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2153 optimize_bit_field (x, insn, NULL_PTR);
2154
2155 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2156 into a register and then store it back out. */
2157 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2158 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2159 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2160 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2161 > GET_MODE_SIZE (GET_MODE (var))))
2162 {
2163 replacement = find_fixup_replacement (replacements, var);
2164 if (replacement->new == 0)
2165 replacement->new = gen_reg_rtx (GET_MODE (var));
2166
2167 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2168 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2169 }
2170
2171 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2172 insn into a pseudo and store the low part of the pseudo into VAR. */
2173 if (GET_CODE (SET_DEST (x)) == SUBREG
2174 && SUBREG_REG (SET_DEST (x)) == var
2175 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2176 > GET_MODE_SIZE (GET_MODE (var))))
2177 {
2178 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2179 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2180 tem)),
2181 insn);
2182 break;
2183 }
2184
2185 {
2186 rtx dest = SET_DEST (x);
2187 rtx src = SET_SRC (x);
2188 #ifdef HAVE_insv
2189 rtx outerdest = dest;
2190 #endif
2191
2192 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2193 || GET_CODE (dest) == SIGN_EXTRACT
2194 || GET_CODE (dest) == ZERO_EXTRACT)
2195 dest = XEXP (dest, 0);
2196
2197 if (GET_CODE (src) == SUBREG)
2198 src = XEXP (src, 0);
2199
2200 /* If VAR does not appear at the top level of the SET
2201 just scan the lower levels of the tree. */
2202
2203 if (src != var && dest != var)
2204 break;
2205
2206 /* We will need to rerecognize this insn. */
2207 INSN_CODE (insn) = -1;
2208
2209 #ifdef HAVE_insv
2210 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2211 {
2212 /* Since this case will return, ensure we fixup all the
2213 operands here. */
2214 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2215 insn, replacements);
2216 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2217 insn, replacements);
2218 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2219 insn, replacements);
2220
2221 tem = XEXP (outerdest, 0);
2222
2223 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2224 that may appear inside a ZERO_EXTRACT.
2225 This was legitimate when the MEM was a REG. */
2226 if (GET_CODE (tem) == SUBREG
2227 && SUBREG_REG (tem) == var)
2228 tem = fixup_memory_subreg (tem, insn, 0);
2229 else
2230 tem = fixup_stack_1 (tem, insn);
2231
2232 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2233 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2234 && ! mode_dependent_address_p (XEXP (tem, 0))
2235 && ! MEM_VOLATILE_P (tem))
2236 {
2237 enum machine_mode wanted_mode;
2238 enum machine_mode is_mode = GET_MODE (tem);
2239 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2240
2241 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2242 if (wanted_mode == VOIDmode)
2243 wanted_mode = word_mode;
2244
2245 /* If we have a narrower mode, we can do something. */
2246 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2247 {
2248 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2249 rtx old_pos = XEXP (outerdest, 2);
2250 rtx newmem;
2251
2252 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2253 offset = (GET_MODE_SIZE (is_mode)
2254 - GET_MODE_SIZE (wanted_mode) - offset);
2255
2256 pos %= GET_MODE_BITSIZE (wanted_mode);
2257
2258 newmem = gen_rtx_MEM (wanted_mode,
2259 plus_constant (XEXP (tem, 0),
2260 offset));
2261 MEM_COPY_ATTRIBUTES (newmem, tem);
2262
2263 /* Make the change and see if the insn remains valid. */
2264 INSN_CODE (insn) = -1;
2265 XEXP (outerdest, 0) = newmem;
2266 XEXP (outerdest, 2) = GEN_INT (pos);
2267
2268 if (recog_memoized (insn) >= 0)
2269 return;
2270
2271 /* Otherwise, restore old position. XEXP (x, 0) will be
2272 restored later. */
2273 XEXP (outerdest, 2) = old_pos;
2274 }
2275 }
2276
2277 /* If we get here, the bit-field store doesn't allow memory
2278 or isn't located at a constant position. Load the value into
2279 a register, do the store, and put it back into memory. */
2280
2281 tem1 = gen_reg_rtx (GET_MODE (tem));
2282 emit_insn_before (gen_move_insn (tem1, tem), insn);
2283 emit_insn_after (gen_move_insn (tem, tem1), insn);
2284 XEXP (outerdest, 0) = tem1;
2285 return;
2286 }
2287 #endif
2288
2289 /* STRICT_LOW_PART is a no-op on memory references
2290 and it can cause combinations to be unrecognizable,
2291 so eliminate it. */
2292
2293 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2294 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2295
2296 /* A valid insn to copy VAR into or out of a register
2297 must be left alone, to avoid an infinite loop here.
2298 If the reference to VAR is by a subreg, fix that up,
2299 since SUBREG is not valid for a memref.
2300 Also fix up the address of the stack slot.
2301
2302 Note that we must not try to recognize the insn until
2303 after we know that we have valid addresses and no
2304 (subreg (mem ...) ...) constructs, since these interfere
2305 with determining the validity of the insn. */
2306
2307 if ((SET_SRC (x) == var
2308 || (GET_CODE (SET_SRC (x)) == SUBREG
2309 && SUBREG_REG (SET_SRC (x)) == var))
2310 && (GET_CODE (SET_DEST (x)) == REG
2311 || (GET_CODE (SET_DEST (x)) == SUBREG
2312 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2313 && GET_MODE (var) == promoted_mode
2314 && x == single_set (insn))
2315 {
2316 rtx pat;
2317
2318 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2319 if (replacement->new)
2320 SET_SRC (x) = replacement->new;
2321 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2322 SET_SRC (x) = replacement->new
2323 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2324 else
2325 SET_SRC (x) = replacement->new
2326 = fixup_stack_1 (SET_SRC (x), insn);
2327
2328 if (recog_memoized (insn) >= 0)
2329 return;
2330
2331 /* INSN is not valid, but we know that we want to
2332 copy SET_SRC (x) to SET_DEST (x) in some way. So
2333 we generate the move and see whether it requires more
2334 than one insn. If it does, we emit those insns and
2335 delete INSN. Otherwise, we can just replace the pattern
2336 of INSN; we have already verified above that INSN has
2337 no function other than to do X. */
2338
2339 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2340 if (GET_CODE (pat) == SEQUENCE)
2341 {
2342 emit_insn_after (pat, insn);
2343 PUT_CODE (insn, NOTE);
2344 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2345 NOTE_SOURCE_FILE (insn) = 0;
2346 }
2347 else
2348 PATTERN (insn) = pat;
2349
2350 return;
2351 }
2352
2353 if ((SET_DEST (x) == var
2354 || (GET_CODE (SET_DEST (x)) == SUBREG
2355 && SUBREG_REG (SET_DEST (x)) == var))
2356 && (GET_CODE (SET_SRC (x)) == REG
2357 || (GET_CODE (SET_SRC (x)) == SUBREG
2358 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2359 && GET_MODE (var) == promoted_mode
2360 && x == single_set (insn))
2361 {
2362 rtx pat;
2363
2364 if (GET_CODE (SET_DEST (x)) == SUBREG)
2365 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2366 else
2367 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2368
2369 if (recog_memoized (insn) >= 0)
2370 return;
2371
2372 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2373 if (GET_CODE (pat) == SEQUENCE)
2374 {
2375 emit_insn_after (pat, insn);
2376 PUT_CODE (insn, NOTE);
2377 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2378 NOTE_SOURCE_FILE (insn) = 0;
2379 }
2380 else
2381 PATTERN (insn) = pat;
2382
2383 return;
2384 }
2385
2386 /* Otherwise, storing into VAR must be handled specially
2387 by storing into a temporary and copying that into VAR
2388 with a new insn after this one. Note that this case
2389 will be used when storing into a promoted scalar since
2390 the insn will now have different modes on the input
2391 and output and hence will be invalid (except for the case
2392 of setting it to a constant, which does not need any
2393 change if it is valid). We generate extra code in that case,
2394 but combine.c will eliminate it. */
2395
2396 if (dest == var)
2397 {
2398 rtx temp;
2399 rtx fixeddest = SET_DEST (x);
2400
2401 /* STRICT_LOW_PART can be discarded around a MEM. */
2402 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2403 fixeddest = XEXP (fixeddest, 0);
2404 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2405 if (GET_CODE (fixeddest) == SUBREG)
2406 {
2407 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2408 promoted_mode = GET_MODE (fixeddest);
2409 }
2410 else
2411 fixeddest = fixup_stack_1 (fixeddest, insn);
2412
2413 temp = gen_reg_rtx (promoted_mode);
2414
2415 emit_insn_after (gen_move_insn (fixeddest,
2416 gen_lowpart (GET_MODE (fixeddest),
2417 temp)),
2418 insn);
2419
2420 SET_DEST (x) = temp;
2421 }
2422 }
2423
2424 default:
2425 break;
2426 }
2427
2428 /* Nothing special about this RTX; fix its operands. */
2429
2430 fmt = GET_RTX_FORMAT (code);
2431 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2432 {
2433 if (fmt[i] == 'e')
2434 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2435 else if (fmt[i] == 'E')
2436 {
2437 register int j;
2438 for (j = 0; j < XVECLEN (x, i); j++)
2439 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2440 insn, replacements);
2441 }
2442 }
2443 }
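/* As a concrete (and hypothetical) case of the MEM handling above: if VAR
   has become (mem:SI (plus (reg frame_pointer) (const_int -12))) and the
   insn that used the old pseudo no longer accepts a memory operand in that
   position, a fresh SImode pseudo is recorded as the replacement here, and
   fixup_var_refs_insns later emits a move from the stack slot into that
   pseudo just before the insn.  */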
2444 \f
2445 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2446 return an rtx (MEM:m1 newaddr) which is equivalent.
2447 If any insns must be emitted to compute NEWADDR, put them before INSN.
2448
2449 UNCRITICAL nonzero means accept paradoxical subregs.
2450 This is used for subregs found inside REG_NOTES. */
2451
2452 static rtx
2453 fixup_memory_subreg (x, insn, uncritical)
2454 rtx x;
2455 rtx insn;
2456 int uncritical;
2457 {
2458 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2459 rtx addr = XEXP (SUBREG_REG (x), 0);
2460 enum machine_mode mode = GET_MODE (x);
2461 rtx result;
2462
2463 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2464 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2465 && ! uncritical)
2466 abort ();
2467
2468 if (BYTES_BIG_ENDIAN)
2469 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2470 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2471 addr = plus_constant (addr, offset);
2472 if (!flag_force_addr && memory_address_p (mode, addr))
2473 /* Shortcut if no insns need be emitted. */
2474 return change_address (SUBREG_REG (x), mode, addr);
2475 start_sequence ();
2476 result = change_address (SUBREG_REG (x), mode, addr);
2477 emit_insn_before (gen_sequence (), insn);
2478 end_sequence ();
2479 return result;
2480 }
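/* For illustration: the byte-offset arithmetic performed above, written as
   a self-contained helper over plain integers.  The constants are
   hypothetical stand-ins for the target macros (4-byte words, big-endian);
   the sketch is not compiled.  */
#if 0
#define EX_UNITS_PER_WORD 4	/* hypothetical word size in bytes */
#define EX_BYTES_BIG_ENDIAN 1	/* hypothetical byte ordering */

static int
ex_min (int a, int b)
{
  return a < b ? a : b;
}

/* Byte offset selected by (SUBREG:outer (MEM:inner ...) SUBREG_WORD),
   mirroring the arithmetic in fixup_memory_subreg.  */
static int
ex_subreg_byte_offset (int subreg_word, int inner_size, int outer_size)
{
  int offset = subreg_word * EX_UNITS_PER_WORD;

  if (EX_BYTES_BIG_ENDIAN)
    offset += (ex_min (EX_UNITS_PER_WORD, inner_size)
	       - ex_min (EX_UNITS_PER_WORD, outer_size));
  return offset;
}

/* E.g. the HImode piece (outer_size == 2) in word 0 of an SImode memory
   (inner_size == 4) lies at byte offset 0 on a little-endian target and
   at byte offset 2 on this hypothetical big-endian one.  */
#endif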
2481
2482 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2483 Replace subexpressions of X in place.
2484 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2485 Otherwise return X, with its contents possibly altered.
2486
2487 If any insns must be emitted to compute NEWADDR, put them before INSN.
2488
2489 UNCRITICAL is as in fixup_memory_subreg. */
2490
2491 static rtx
2492 walk_fixup_memory_subreg (x, insn, uncritical)
2493 register rtx x;
2494 rtx insn;
2495 int uncritical;
2496 {
2497 register enum rtx_code code;
2498 register const char *fmt;
2499 register int i;
2500
2501 if (x == 0)
2502 return 0;
2503
2504 code = GET_CODE (x);
2505
2506 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2507 return fixup_memory_subreg (x, insn, uncritical);
2508
2509 /* Nothing special about this RTX; fix its operands. */
2510
2511 fmt = GET_RTX_FORMAT (code);
2512 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2513 {
2514 if (fmt[i] == 'e')
2515 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2516 else if (fmt[i] == 'E')
2517 {
2518 register int j;
2519 for (j = 0; j < XVECLEN (x, i); j++)
2520 XVECEXP (x, i, j)
2521 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2522 }
2523 }
2524 return x;
2525 }
2526 \f
2527 /* For each memory ref within X, if it refers to a stack slot
2528 with an out of range displacement, put the address in a temp register
2529 (emitting new insns before INSN to load these registers)
2530 and alter the memory ref to use that register.
2531 Replace each such MEM rtx with a copy, to avoid clobberage. */
2532
2533 static rtx
2534 fixup_stack_1 (x, insn)
2535 rtx x;
2536 rtx insn;
2537 {
2538 register int i;
2539 register RTX_CODE code = GET_CODE (x);
2540 register const char *fmt;
2541
2542 if (code == MEM)
2543 {
2544 register rtx ad = XEXP (x, 0);
2545 /* If we have address of a stack slot but it's not valid
2546 (displacement is too large), compute the sum in a register. */
2547 if (GET_CODE (ad) == PLUS
2548 && GET_CODE (XEXP (ad, 0)) == REG
2549 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2550 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2551 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2552 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2553 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2554 #endif
2555 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2556 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2557 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2558 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2559 {
2560 rtx temp, seq;
2561 if (memory_address_p (GET_MODE (x), ad))
2562 return x;
2563
2564 start_sequence ();
2565 temp = copy_to_reg (ad);
2566 seq = gen_sequence ();
2567 end_sequence ();
2568 emit_insn_before (seq, insn);
2569 return change_address (x, VOIDmode, temp);
2570 }
2571 return x;
2572 }
2573
2574 fmt = GET_RTX_FORMAT (code);
2575 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2576 {
2577 if (fmt[i] == 'e')
2578 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2579 else if (fmt[i] == 'E')
2580 {
2581 register int j;
2582 for (j = 0; j < XVECLEN (x, i); j++)
2583 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2584 }
2585 }
2586 return x;
2587 }
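/* By way of a hypothetical example: if a stack slot is addressed as
   (plus (reg frame_pointer) (const_int 40000)) and the target only allows
   smaller displacements, the code above emits, before INSN, insns copying
   that sum into a temporary register and rewrites the MEM (in a fresh copy)
   to address memory through that register instead.  */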
2588 \f
2589 /* Optimization: a bit-field instruction whose field
2590 happens to be a byte or halfword in memory
2591 can be changed to a move instruction.
2592
2593 We call here when INSN is an insn to examine or store into a bit-field.
2594 BODY is the SET-rtx to be altered.
2595
2596 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2597 (Currently this is called only from function.c, and EQUIV_MEM
2598 is always 0.) */
2599
2600 static void
2601 optimize_bit_field (body, insn, equiv_mem)
2602 rtx body;
2603 rtx insn;
2604 rtx *equiv_mem;
2605 {
2606 register rtx bitfield;
2607 int destflag;
2608 rtx seq = 0;
2609 enum machine_mode mode;
2610
2611 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2612 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2613 bitfield = SET_DEST (body), destflag = 1;
2614 else
2615 bitfield = SET_SRC (body), destflag = 0;
2616
2617 /* First check that the field being stored has constant size and position
2618 and is in fact a byte or halfword suitably aligned. */
2619
2620 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2621 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2622 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2623 != BLKmode)
2624 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2625 {
2626 register rtx memref = 0;
2627
2628 /* Now check that the containing word is memory, not a register,
2629 and that it is safe to change the machine mode. */
2630
2631 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2632 memref = XEXP (bitfield, 0);
2633 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2634 && equiv_mem != 0)
2635 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2636 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2637 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2638 memref = SUBREG_REG (XEXP (bitfield, 0));
2639 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2640 && equiv_mem != 0
2641 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2642 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2643
2644 if (memref
2645 && ! mode_dependent_address_p (XEXP (memref, 0))
2646 && ! MEM_VOLATILE_P (memref))
2647 {
2648 /* Now adjust the address, first for any subreg'ing
2649 that we are now getting rid of,
2650 and then for which byte of the word is wanted. */
2651
2652 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2653 rtx insns;
2654
2655 /* Adjust OFFSET to count bits from low-address byte. */
2656 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2657 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2658 - offset - INTVAL (XEXP (bitfield, 1)));
2659
2660 /* Adjust OFFSET to count bytes from low-address byte. */
2661 offset /= BITS_PER_UNIT;
2662 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2663 {
2664 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2665 if (BYTES_BIG_ENDIAN)
2666 offset -= (MIN (UNITS_PER_WORD,
2667 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2668 - MIN (UNITS_PER_WORD,
2669 GET_MODE_SIZE (GET_MODE (memref))));
2670 }
2671
2672 start_sequence ();
2673 memref = change_address (memref, mode,
2674 plus_constant (XEXP (memref, 0), offset));
2675 insns = get_insns ();
2676 end_sequence ();
2677 emit_insns_before (insns, insn);
2678
2679 /* Store this memory reference where
2680 we found the bit field reference. */
2681
2682 if (destflag)
2683 {
2684 validate_change (insn, &SET_DEST (body), memref, 1);
2685 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2686 {
2687 rtx src = SET_SRC (body);
2688 while (GET_CODE (src) == SUBREG
2689 && SUBREG_WORD (src) == 0)
2690 src = SUBREG_REG (src);
2691 if (GET_MODE (src) != GET_MODE (memref))
2692 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2693 validate_change (insn, &SET_SRC (body), src, 1);
2694 }
2695 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2696 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2697 /* This shouldn't happen because anything that didn't have
2698 one of these modes should have got converted explicitly
2699 and then referenced through a subreg.
2700 This is so because the original bit-field was
2701 handled by agg_mode and so its tree structure had
2702 the same mode that memref now has. */
2703 abort ();
2704 }
2705 else
2706 {
2707 rtx dest = SET_DEST (body);
2708
2709 while (GET_CODE (dest) == SUBREG
2710 && SUBREG_WORD (dest) == 0
2711 && (GET_MODE_CLASS (GET_MODE (dest))
2712 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2713 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2714 <= UNITS_PER_WORD))
2715 dest = SUBREG_REG (dest);
2716
2717 validate_change (insn, &SET_DEST (body), dest, 1);
2718
2719 if (GET_MODE (dest) == GET_MODE (memref))
2720 validate_change (insn, &SET_SRC (body), memref, 1);
2721 else
2722 {
2723 /* Convert the mem ref to the destination mode. */
2724 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2725
2726 start_sequence ();
2727 convert_move (newreg, memref,
2728 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2729 seq = get_insns ();
2730 end_sequence ();
2731
2732 validate_change (insn, &SET_SRC (body), newreg, 1);
2733 }
2734 }
2735
2736 /* See if we can convert this extraction or insertion into
2737 a simple move insn. We might not be able to do so if this
2738 was, for example, part of a PARALLEL.
2739
2740 If we succeed, write out any needed conversions. If we fail,
2741 it is hard to guess why we failed, so don't do anything
2742 special; just let the optimization be suppressed. */
2743
2744 if (apply_change_group () && seq)
2745 emit_insns_before (seq, insn);
2746 }
2747 }
2748 }
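/* A worked (hypothetical) example of the transformation above: extracting
   8 bits at bit position 16 from a 32-bit memory word, when bits and bytes
   are numbered the same way, satisfies all the tests (8 bits is QImode and
   16 % 8 == 0), so the extraction becomes a plain QImode load from byte
   16 / 8 == 2 of the word.  When BITS_BIG_ENDIAN differs from
   BYTES_BIG_ENDIAN, the bit position is first flipped to 32 - 16 - 8 == 8,
   i.e. byte 1.  */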
2749 \f
2750 /* These routines are responsible for converting virtual register references
2751 to the actual hard register references once RTL generation is complete.
2752
2753 The following four variables are used for communication between the
2754 routines. They contain the offsets of the virtual registers from their
2755 respective hard registers. */
2756
2757 static int in_arg_offset;
2758 static int var_offset;
2759 static int dynamic_offset;
2760 static int out_arg_offset;
2761 static int cfa_offset;
2762
2763 /* In most machines, the stack pointer register is equivalent to the bottom
2764 of the stack. */
2765
2766 #ifndef STACK_POINTER_OFFSET
2767 #define STACK_POINTER_OFFSET 0
2768 #endif
2769
2770 /* If not defined, pick an appropriate default for the offset of dynamically
2771 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2772 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2773
2774 #ifndef STACK_DYNAMIC_OFFSET
2775
2776 /* The bottom of the stack points to the actual arguments. If
2777 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2778 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2779 stack space for register parameters is not pushed by the caller, but is
2780 rather part of the fixed stack areas and hence not included in
2781 `current_function_outgoing_args_size'. Nevertheless, we must allow
2782 for it when allocating space for dynamic stack objects. */
2783
2784 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2785 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2786 ((ACCUMULATE_OUTGOING_ARGS \
2787 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2788 + (STACK_POINTER_OFFSET)) \
2789
2790 #else
2791 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2792 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2793 + (STACK_POINTER_OFFSET))
2794 #endif
2795 #endif
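/* A worked example with purely hypothetical values: if
   ACCUMULATE_OUTGOING_ARGS is nonzero, current_function_outgoing_args_size
   is 16, REG_PARM_STACK_SPACE (FNDECL) is 24 and STACK_POINTER_OFFSET is 0,
   the first definition above yields a dynamic offset of 16 + 24 + 0 == 40
   bytes; the second definition, which ignores register-parameter space,
   would yield 16 + 0 == 16.  */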
2796
2797 /* On most machines, the CFA coincides with the first incoming parm. */
2798
2799 #ifndef ARG_POINTER_CFA_OFFSET
2800 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2801 #endif
2802
2803 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2804 its address taken. DECL is the decl for the object stored in the
2805 register, for later use if we do need to force REG into the stack.
2806 REG is overwritten by the MEM like in put_reg_into_stack. */
2807
2808 rtx
2809 gen_mem_addressof (reg, decl)
2810 rtx reg;
2811 tree decl;
2812 {
2813 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2814 REGNO (reg), decl);
2815
2816 /* If the original REG was a user-variable, then so is the REG whose
2817 address is being taken. Likewise for unchanging. */
2818 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2819 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2820
2821 PUT_CODE (reg, MEM);
2822 XEXP (reg, 0) = r;
2823 if (decl)
2824 {
2825 tree type = TREE_TYPE (decl);
2826
2827 PUT_MODE (reg, DECL_MODE (decl));
2828 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2829 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2830 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2831
2832 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2833 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2834 }
2835 else
2836 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2837
2838 return reg;
2839 }
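/* For example (register numbers and modes hypothetical): if (reg:SI 42)
   holds DECL, the code above allocates a fresh pseudo, say (reg:SI 117),
   and rewrites the original rtx in place, so every existing reference to
   register 42 now reads (mem:SI (addressof:Pmode (reg:SI 117) 42 DECL)),
   with the MEM taking DECL's mode; fixup_var_refs then repairs any insn
   that the REG-to-MEM change made invalid.  */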
2840
2841 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2842
2843 void
2844 flush_addressof (decl)
2845 tree decl;
2846 {
2847 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2848 && DECL_RTL (decl) != 0
2849 && GET_CODE (DECL_RTL (decl)) == MEM
2850 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2851 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2852 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2853 }
2854
2855 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2856
2857 static void
2858 put_addressof_into_stack (r, ht)
2859 rtx r;
2860 struct hash_table *ht;
2861 {
2862 tree decl, type;
2863 int volatile_p, used_p;
2864
2865 rtx reg = XEXP (r, 0);
2866
2867 if (GET_CODE (reg) != REG)
2868 abort ();
2869
2870 decl = ADDRESSOF_DECL (r);
2871 if (decl)
2872 {
2873 type = TREE_TYPE (decl);
2874 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2875 && TREE_THIS_VOLATILE (decl));
2876 used_p = (TREE_USED (decl)
2877 || (TREE_CODE (decl) != SAVE_EXPR
2878 && DECL_INITIAL (decl) != 0));
2879 }
2880 else
2881 {
2882 type = NULL_TREE;
2883 volatile_p = 0;
2884 used_p = 1;
2885 }
2886
2887 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2888 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2889 }
2890
2891 /* List of replacements made below in purge_addressof_1 when creating
2892 bitfield insertions. */
2893 static rtx purge_bitfield_addressof_replacements;
2894
2895 /* List of replacements made below in purge_addressof_1 for patterns
2896 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2897 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2898 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2899 enough in complex cases, e.g. when some field values can be
2900 extracted by using a MEM with a narrower mode. */
2901 static rtx purge_addressof_replacements;
2902
2903 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2904 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2905 the stack. If the function returns FALSE then the replacement could not
2906 be made. */
2907
2908 static boolean
2909 purge_addressof_1 (loc, insn, force, store, ht)
2910 rtx *loc;
2911 rtx insn;
2912 int force, store;
2913 struct hash_table *ht;
2914 {
2915 rtx x;
2916 RTX_CODE code;
2917 int i, j;
2918 const char *fmt;
2919 boolean result = true;
2920
2921 /* Re-start here to avoid recursion in common cases. */
2922 restart:
2923
2924 x = *loc;
2925 if (x == 0)
2926 return true;
2927
2928 code = GET_CODE (x);
2929
2930 /* If we don't return in any of the cases below, we will recurse inside
2931 the RTX, which will normally result in any ADDRESSOF being forced into
2932 memory. */
2933 if (code == SET)
2934 {
2935 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2936 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2937 return result;
2938 }
2939
2940 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2941 {
2942 /* We must create a copy of the rtx because it was created by
2943 overwriting a REG rtx which is always shared. */
2944 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2945 rtx insns;
2946
2947 if (validate_change (insn, loc, sub, 0)
2948 || validate_replace_rtx (x, sub, insn))
2949 return true;
2950
2951 start_sequence ();
2952 sub = force_operand (sub, NULL_RTX);
2953 if (! validate_change (insn, loc, sub, 0)
2954 && ! validate_replace_rtx (x, sub, insn))
2955 abort ();
2956
2957 insns = gen_sequence ();
2958 end_sequence ();
2959 emit_insn_before (insns, insn);
2960 return true;
2961 }
2962
2963 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2964 {
2965 rtx sub = XEXP (XEXP (x, 0), 0);
2966 rtx sub2;
2967
2968 if (GET_CODE (sub) == MEM)
2969 {
2970 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2971 MEM_COPY_ATTRIBUTES (sub2, sub);
2972 sub = sub2;
2973 }
2974 else if (GET_CODE (sub) == REG
2975 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2976 ;
2977 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2978 {
2979 int size_x, size_sub;
2980
2981 if (!insn)
2982 {
2983 /* When processing REG_NOTES look at the list of
2984 replacements done on the insn to find the register that X
2985 was replaced by. */
2986 rtx tem;
2987
2988 for (tem = purge_bitfield_addressof_replacements;
2989 tem != NULL_RTX;
2990 tem = XEXP (XEXP (tem, 1), 1))
2991 if (rtx_equal_p (x, XEXP (tem, 0)))
2992 {
2993 *loc = XEXP (XEXP (tem, 1), 0);
2994 return true;
2995 }
2996
2997 /* See comment for purge_addressof_replacements. */
2998 for (tem = purge_addressof_replacements;
2999 tem != NULL_RTX;
3000 tem = XEXP (XEXP (tem, 1), 1))
3001 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3002 {
3003 rtx z = XEXP (XEXP (tem, 1), 0);
3004
3005 if (GET_MODE (x) == GET_MODE (z)
3006 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3007 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3008 abort ();
3009
3010 /* It can happen that the note may speak of things
3011 in a wider (or just different) mode than the
3012 code did. This is especially true of
3013 REG_RETVAL. */
3014
3015 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3016 z = SUBREG_REG (z);
3017
3018 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3019 && (GET_MODE_SIZE (GET_MODE (x))
3020 > GET_MODE_SIZE (GET_MODE (z))))
3021 {
3022 /* This can occur as a result of invalid
3023 pointer casts, e.g. float f; ...
3024 *(long long int *)&f.
3025 ??? We could emit a warning here, but
3026 without a line number that wouldn't be
3027 very helpful. */
3028 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3029 }
3030 else
3031 z = gen_lowpart (GET_MODE (x), z);
3032
3033 *loc = z;
3034 return true;
3035 }
3036
3037 /* Sometimes we may not be able to find the replacement. For
3038 example when the original insn was a MEM in a wider mode,
3039 and the note is part of a sign extension of a narrowed
3040 version of that MEM. Gcc testcase compile/990829-1.c can
3041 generate an example of this situation. Rather than complain,
3042 we return false, which will prompt our caller to remove the
3043 offending note. */
3044 return false;
3045 }
3046
3047 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3048 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3049
3050 /* Don't even consider working with paradoxical subregs,
3051 or the moral equivalent seen here. */
3052 if (size_x <= size_sub
3053 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3054 {
3055 /* Do a bitfield insertion to mirror what would happen
3056 in memory. */
3057
3058 rtx val, seq;
3059
3060 if (store)
3061 {
3062 rtx p = PREV_INSN (insn);
3063
3064 start_sequence ();
3065 val = gen_reg_rtx (GET_MODE (x));
3066 if (! validate_change (insn, loc, val, 0))
3067 {
3068 /* Discard the current sequence and put the
3069 ADDRESSOF on stack. */
3070 end_sequence ();
3071 goto give_up;
3072 }
3073 seq = gen_sequence ();
3074 end_sequence ();
3075 emit_insn_before (seq, insn);
3076 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3077 insn, ht);
3078
3079 start_sequence ();
3080 store_bit_field (sub, size_x, 0, GET_MODE (x),
3081 val, GET_MODE_SIZE (GET_MODE (sub)),
3082 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3083
3084 /* Make sure to unshare any shared rtl that store_bit_field
3085 might have created. */
3086 unshare_all_rtl_again (get_insns ());
3087
3088 seq = gen_sequence ();
3089 end_sequence ();
3090 p = emit_insn_after (seq, insn);
3091 if (NEXT_INSN (insn))
3092 compute_insns_for_mem (NEXT_INSN (insn),
3093 p ? NEXT_INSN (p) : NULL_RTX,
3094 ht);
3095 }
3096 else
3097 {
3098 rtx p = PREV_INSN (insn);
3099
3100 start_sequence ();
3101 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3102 GET_MODE (x), GET_MODE (x),
3103 GET_MODE_SIZE (GET_MODE (sub)),
3104 GET_MODE_SIZE (GET_MODE (sub)));
3105
3106 if (! validate_change (insn, loc, val, 0))
3107 {
3108 /* Discard the current sequence and put the
3109 ADDRESSOF on stack. */
3110 end_sequence ();
3111 goto give_up;
3112 }
3113
3114 seq = gen_sequence ();
3115 end_sequence ();
3116 emit_insn_before (seq, insn);
3117 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3118 insn, ht);
3119 }
3120
3121 /* Remember the replacement so that the same one can be done
3122 on the REG_NOTES. */
3123 purge_bitfield_addressof_replacements
3124 = gen_rtx_EXPR_LIST (VOIDmode, x,
3125 gen_rtx_EXPR_LIST
3126 (VOIDmode, val,
3127 purge_bitfield_addressof_replacements));
3128
3129 /* We replaced with a reg -- all done. */
3130 return true;
3131 }
3132 }
3133
3134 else if (validate_change (insn, loc, sub, 0))
3135 {
3136 /* Remember the replacement so that the same one can be done
3137 on the REG_NOTES. */
3138 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3139 {
3140 rtx tem;
3141
3142 for (tem = purge_addressof_replacements;
3143 tem != NULL_RTX;
3144 tem = XEXP (XEXP (tem, 1), 1))
3145 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3146 {
3147 XEXP (XEXP (tem, 1), 0) = sub;
3148 return true;
3149 }
3150 purge_addressof_replacements
3151 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
3152 gen_rtx_EXPR_LIST (VOIDmode, sub,
3153 purge_addressof_replacements));
3154 return true;
3155 }
3156 goto restart;
3157 }
3158 give_up:;
3159 /* else give up and put it into the stack */
3160 }
3161
3162 else if (code == ADDRESSOF)
3163 {
3164 put_addressof_into_stack (x, ht);
3165 return true;
3166 }
3167 else if (code == SET)
3168 {
3169 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3170 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3171 return result;
3172 }
3173
3174 /* Scan all subexpressions. */
3175 fmt = GET_RTX_FORMAT (code);
3176 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3177 {
3178 if (*fmt == 'e')
3179 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3180 else if (*fmt == 'E')
3181 for (j = 0; j < XVECLEN (x, i); j++)
3182 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3183 }
3184
3185 return result;
3186 }
3187
3188 /* Return a new hash table entry in HT. */
3189
3190 static struct hash_entry *
3191 insns_for_mem_newfunc (he, ht, k)
3192 struct hash_entry *he;
3193 struct hash_table *ht;
3194 hash_table_key k ATTRIBUTE_UNUSED;
3195 {
3196 struct insns_for_mem_entry *ifmhe;
3197 if (he)
3198 return he;
3199
3200 ifmhe = ((struct insns_for_mem_entry *)
3201 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3202 ifmhe->insns = NULL_RTX;
3203
3204 return &ifmhe->he;
3205 }
3206
3207 /* Return a hash value for K, a REG. */
3208
3209 static unsigned long
3210 insns_for_mem_hash (k)
3211 hash_table_key k;
3212 {
3213 /* K is really a RTX. Just use the address as the hash value. */
3214 return (unsigned long) k;
3215 }
3216
3217 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3218
3219 static boolean
3220 insns_for_mem_comp (k1, k2)
3221 hash_table_key k1;
3222 hash_table_key k2;
3223 {
3224 return k1 == k2;
3225 }
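/* The two callbacks above implement pointer-identity hashing: the key's
   address is the hash value and comparison is plain pointer equality.  A
   self-contained sketch of the same idea over a toy linear-probing table
   (hypothetical names, fixed size, no deletion) follows; it is not
   compiled.  */
#if 0
#include <stddef.h>

#define EX_TABLE_SIZE 64	/* hypothetical; must be a power of two */

static void *ex_table[EX_TABLE_SIZE];

/* Hash a key by its address, as insns_for_mem_hash does.  */
static unsigned long
ex_hash (void *key)
{
  return (unsigned long) key;
}

/* Return the slot holding KEY, or the first empty slot where it would
   go.  Assumes the table never fills up completely.  */
static void **
ex_lookup (void *key)
{
  unsigned long i = ex_hash (key) & (EX_TABLE_SIZE - 1);

  while (ex_table[i] != NULL && ex_table[i] != key)
    i = (i + 1) & (EX_TABLE_SIZE - 1);
  return &ex_table[i];
}
#endif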
3226
3227 struct insns_for_mem_walk_info {
3228 /* The hash table that we are using to record which INSNs use which
3229 MEMs. */
3230 struct hash_table *ht;
3231
3232 /* The INSN we are currently processing. */
3233 rtx insn;
3234
3235 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3236 to find the insns that use the REGs in the ADDRESSOFs. */
3237 int pass;
3238 };
3239
3240 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3241 that might be used in an ADDRESSOF expression, record this INSN in
3242 the hash table given by DATA (which is really a pointer to an
3243 insns_for_mem_walk_info structure). */
3244
3245 static int
3246 insns_for_mem_walk (r, data)
3247 rtx *r;
3248 void *data;
3249 {
3250 struct insns_for_mem_walk_info *ifmwi
3251 = (struct insns_for_mem_walk_info *) data;
3252
3253 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3254 && GET_CODE (XEXP (*r, 0)) == REG)
3255 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3256 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3257 {
3258 /* Look up this REG in the hash table; it was entered, if at all, during the first pass. */
3259 struct insns_for_mem_entry *ifme
3260 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3261 *r,
3262 /*create=*/0,
3263 /*copy=*/0);
3264
3265 /* If we have not already recorded this INSN, do so now. Since
3266 we process the INSNs in order, we know that if we have
3267 recorded it, it must be at the front of the list. */
3268 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3269 {
3270 /* We do the allocation on the same obstack as is used for
3271 the hash table since this memory will not be used once
3272 the hash table is deallocated. */
3273 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3274 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3275 ifme->insns);
3276 pop_obstacks ();
3277 }
3278 }
3279
3280 return 0;
3281 }
3282
3283 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3284 which REGs in HT. */
3285
3286 static void
3287 compute_insns_for_mem (insns, last_insn, ht)
3288 rtx insns;
3289 rtx last_insn;
3290 struct hash_table *ht;
3291 {
3292 rtx insn;
3293 struct insns_for_mem_walk_info ifmwi;
3294 ifmwi.ht = ht;
3295
3296 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3297 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3298 if (INSN_P (insn))
3299 {
3300 ifmwi.insn = insn;
3301 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3302 }
3303 }
3304
3305 /* Helper function for purge_addressof called through for_each_rtx.
3306 Returns true iff the rtl is an ADDRESSOF. */
3307 static int
3308 is_addressof (rtl, data)
3309 rtx *rtl;
3310 void *data ATTRIBUTE_UNUSED;
3311 {
3312 return GET_CODE (*rtl) == ADDRESSOF;
3313 }
3314
3315 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3316 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3317 stack. */
3318
3319 void
3320 purge_addressof (insns)
3321 rtx insns;
3322 {
3323 rtx insn;
3324 struct hash_table ht;
3325
3326 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3327 requires a fixup pass over the instruction stream to correct
3328 INSNs that depended on the REG being a REG, and not a MEM. But,
3329 these fixup passes are slow. Furthermore, most MEMs are not
3330 mentioned in very many instructions. So, we speed up the process
3331 by pre-calculating which REGs occur in which INSNs; that allows
3332 us to perform the fixup passes much more quickly. */
3333 hash_table_init (&ht,
3334 insns_for_mem_newfunc,
3335 insns_for_mem_hash,
3336 insns_for_mem_comp);
3337 compute_insns_for_mem (insns, NULL_RTX, &ht);
3338
3339 for (insn = insns; insn; insn = NEXT_INSN (insn))
3340 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3341 || GET_CODE (insn) == CALL_INSN)
3342 {
3343 if (! purge_addressof_1 (&PATTERN (insn), insn,
3344 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3345 /* If we could not replace the ADDRESSOFs in the insn,
3346 something is wrong. */
3347 abort ();
3348
3349 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3350 {
3351 /* If we could not replace the ADDRESSOFs in the insn's notes,
3352 we can just remove the offending notes instead. */
3353 rtx note;
3354
3355 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3356 {
3357 /* If we find a REG_RETVAL note then the insn is a libcall.
3358 Such insns must have REG_EQUAL notes as well, in order
3359 for later passes of the compiler to work. So it is not
3360 safe to delete the notes here, and instead we abort. */
3361 if (REG_NOTE_KIND (note) == REG_RETVAL)
3362 abort ();
3363 if (for_each_rtx (&note, is_addressof, NULL))
3364 remove_note (insn, note);
3365 }
3366 }
3367 }
3368
3369 /* Clean up. */
3370 hash_table_free (&ht);
3371 purge_bitfield_addressof_replacements = 0;
3372 purge_addressof_replacements = 0;
3373
3374 /* REGs are shared. purge_addressof will destructively replace a REG
3375 with a MEM, which creates shared MEMs.
3376
3377 Unfortunately, the children of put_reg_into_stack assume that MEMs
3378 referring to the same stack slot are shared (fixup_var_refs and
3379 the associated hash table code).
3380
3381 So, we have to do another unsharing pass after we have flushed any
3382 REGs that had their address taken into the stack.
3383
3384 It may be worth tracking whether or not we converted any REGs into
3385 MEMs to avoid this overhead when it is not needed. */
3386 unshare_all_rtl_again (get_insns ());
3387 }
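/* An illustration of the speedup described at the top of purge_addressof:
   rather than rescanning every insn for every register, build a per-key
   list of users in one pass and consult it later.  The types and names
   below are hypothetical simplifications (each insn is assumed to mention
   exactly one register); the sketch is not compiled.  */
#if 0
#include <stdlib.h>

struct ex_use { int insn_index; struct ex_use *next; };

/* Fill USERS, indexed by register number and zero-initialized by the
   caller, so that USERS[r] lists (most recent first) the indices of the
   insns mentioning register r.  INSN_REGS[i] gives the register mentioned
   by insn i.  */
static void
ex_build_index (const int *insn_regs, int n_insns, struct ex_use **users)
{
  int i;

  for (i = 0; i < n_insns; i++)
    {
      struct ex_use *u = (struct ex_use *) malloc (sizeof (struct ex_use));

      u->insn_index = i;
      u->next = users[insn_regs[i]];
      users[insn_regs[i]] = u;
    }
}
#endif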
3388 \f
3389 /* Pass through the INSNS of function FNDECL and convert virtual register
3390 references to hard register references. */
3391
3392 void
3393 instantiate_virtual_regs (fndecl, insns)
3394 tree fndecl;
3395 rtx insns;
3396 {
3397 rtx insn;
3398 unsigned int i;
3399
3400 /* Compute the offsets to use for this function. */
3401 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3402 var_offset = STARTING_FRAME_OFFSET;
3403 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3404 out_arg_offset = STACK_POINTER_OFFSET;
3405 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3406
3407 /* Scan all variables and parameters of this function. For each that is
3408 in memory, instantiate all virtual registers if the result is a valid
3409 address. If not, we do it later. That will handle most uses of virtual
3410 regs on many machines. */
3411 instantiate_decls (fndecl, 1);
3412
3413 /* Initialize recognition, indicating that volatile is OK. */
3414 init_recog ();
3415
3416 /* Scan through all the insns, instantiating every virtual register still
3417 present. */
3418 for (insn = insns; insn; insn = NEXT_INSN (insn))
3419 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3420 || GET_CODE (insn) == CALL_INSN)
3421 {
3422 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3423 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3424 }
3425
3426 /* Instantiate the stack slots for the parm registers, for later use in
3427 addressof elimination. */
3428 for (i = 0; i < max_parm_reg; ++i)
3429 if (parm_reg_stack_loc[i])
3430 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3431
3432 /* Now instantiate the remaining register equivalences for debugging info.
3433 These will not be valid addresses. */
3434 instantiate_decls (fndecl, 0);
3435
3436 /* Indicate that, from now on, assign_stack_local should use
3437 frame_pointer_rtx. */
3438 virtuals_instantiated = 1;
3439 }
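/* For instance (offsets hypothetical): with var_offset == -16, a reference
   such as (plus (reg virtual_stack_vars) (const_int 8)) is rewritten by
   instantiate_virtual_regs_1 into (plus (reg frame_pointer) (const_int -8)):
   the virtual register is replaced by its hard register and the constant is
   adjusted by the offset computed above.  */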
3440
3441 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3442 all virtual registers in their DECL_RTL's.
3443
3444 If VALID_ONLY, do this only if the resulting address is still valid.
3445 Otherwise, always do it. */
3446
3447 static void
3448 instantiate_decls (fndecl, valid_only)
3449 tree fndecl;
3450 int valid_only;
3451 {
3452 tree decl;
3453
3454 if (DECL_SAVED_INSNS (fndecl))
3455 /* When compiling an inline function, the obstack used for
3456 rtl allocation is the maybepermanent_obstack. Calling
3457 `resume_temporary_allocation' switches us back to that
3458 obstack while we process this function's parameters. */
3459 resume_temporary_allocation ();
3460
3461 /* Process all parameters of the function. */
3462 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3463 {
3464 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3465
3466 instantiate_decl (DECL_RTL (decl), size, valid_only);
3467
3468 /* If the parameter was promoted, then the incoming RTL mode may be
3469 larger than the declared type size. We must use the larger of
3470 the two sizes. */
3471 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3472 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3473 }
3474
3475 /* Now process all variables defined in the function or its subblocks. */
3476 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3477
3478 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3479 {
3480 /* Save all rtl allocated for this function by raising the
3481 high-water mark on the maybepermanent_obstack. */
3482 preserve_data ();
3483 /* All further rtl allocation is now done in the current_obstack. */
3484 rtl_in_current_obstack ();
3485 }
3486 }
3487
3488 /* Subroutine of instantiate_decls: Process all decls in the given
3489 BLOCK node and all its subblocks. */
3490
3491 static void
3492 instantiate_decls_1 (let, valid_only)
3493 tree let;
3494 int valid_only;
3495 {
3496 tree t;
3497
3498 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3499 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3500 valid_only);
3501
3502 /* Process all subblocks. */
3503 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3504 instantiate_decls_1 (t, valid_only);
3505 }
3506
3507 /* Subroutine of the preceding procedures: Given RTL representing a
3508 decl and the size of the object, do any instantiation required.
3509
3510 If VALID_ONLY is non-zero, it means that the RTL should only be
3511 changed if the new address is valid. */
3512
3513 static void
3514 instantiate_decl (x, size, valid_only)
3515 rtx x;
3516 HOST_WIDE_INT size;
3517 int valid_only;
3518 {
3519 enum machine_mode mode;
3520 rtx addr;
3521
3522 /* If this is not a MEM, no need to do anything. Similarly if the
3523 address is a constant or a register that is not a virtual register. */
3524
3525 if (x == 0 || GET_CODE (x) != MEM)
3526 return;
3527
3528 addr = XEXP (x, 0);
3529 if (CONSTANT_P (addr)
3530 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3531 || (GET_CODE (addr) == REG
3532 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3533 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3534 return;
3535
3536 /* If we should only do this if the address is valid, copy the address.
3537 We need to do this so we can undo any changes that might make the
3538 address invalid. This copy is unfortunate, but probably can't be
3539 avoided. */
3540
3541 if (valid_only)
3542 addr = copy_rtx (addr);
3543
3544 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3545
3546 if (valid_only && size >= 0)
3547 {
3548 unsigned HOST_WIDE_INT decl_size = size;
3549
3550 /* Now verify that the resulting address is valid for every integer or
3551 floating-point mode up to and including SIZE bytes long. We do this
3552 since the object might be accessed in any mode and frame addresses
3553 are shared. */
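/* As a hypothetical illustration: with SIZE == 8 on a typical 32-bit
   target, the two loops below check QImode, HImode, SImode and DImode,
   and then SFmode and DFmode, giving up on the in-place substitution if
   the address is invalid for any one of those modes.  */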
3554
3555 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3556 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3557 mode = GET_MODE_WIDER_MODE (mode))
3558 if (! memory_address_p (mode, addr))
3559 return;
3560
3561 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3562 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3563 mode = GET_MODE_WIDER_MODE (mode))
3564 if (! memory_address_p (mode, addr))
3565 return;
3566 }
3567
3568 /* Put back the address now that we have updated it and we either know
3569 it is valid or we don't care whether it is valid. */
3570
3571 XEXP (x, 0) = addr;
3572 }
3573 \f
3574 /* Given a pointer to a piece of rtx and an optional pointer to the
3575 containing object, instantiate any virtual registers present in it.
3576
3577 If EXTRA_INSNS, we always do the replacement and generate
3578    any extra insns before OBJECT.  If it is zero, we do nothing if replacement
3579 is not valid.
3580
3581 Return 1 if we either had nothing to do or if we were able to do the
3582 needed replacement. Return 0 otherwise; we only return zero if
3583 EXTRA_INSNS is zero.
3584
3585 We first try some simple transformations to avoid the creation of extra
3586 pseudos. */
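/* A hedged example of the EXTRA_INSNS behaviour: if a virtual register is
   replaced by, say, stack_pointer_rtx plus an offset and the resulting PLUS
   is not valid inside OBJECT, then with EXTRA_INSNS nonzero the sum is
   computed into a fresh pseudo by insns emitted just before OBJECT and the
   pseudo is substituted instead; with EXTRA_INSNS zero we return 0 and leave
   the expression unchanged.  */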
3587
3588 static int
3589 instantiate_virtual_regs_1 (loc, object, extra_insns)
3590 rtx *loc;
3591 rtx object;
3592 int extra_insns;
3593 {
3594 rtx x;
3595 RTX_CODE code;
3596 rtx new = 0;
3597 HOST_WIDE_INT offset = 0;
3598 rtx temp;
3599 rtx seq;
3600 int i, j;
3601 const char *fmt;
3602
3603 /* Re-start here to avoid recursion in common cases. */
3604 restart:
3605
3606 x = *loc;
3607 if (x == 0)
3608 return 1;
3609
3610 code = GET_CODE (x);
3611
3612 /* Check for some special cases. */
3613 switch (code)
3614 {
3615 case CONST_INT:
3616 case CONST_DOUBLE:
3617 case CONST:
3618 case SYMBOL_REF:
3619 case CODE_LABEL:
3620 case PC:
3621 case CC0:
3622 case ASM_INPUT:
3623 case ADDR_VEC:
3624 case ADDR_DIFF_VEC:
3625 case RETURN:
3626 return 1;
3627
3628 case SET:
3629 /* We are allowed to set the virtual registers. This means that
3630 the actual register should receive the source minus the
3631 appropriate offset. This is used, for example, in the handling
3632 of non-local gotos. */
3633 if (SET_DEST (x) == virtual_incoming_args_rtx)
3634 new = arg_pointer_rtx, offset = -in_arg_offset;
3635 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3636 new = frame_pointer_rtx, offset = -var_offset;
3637 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3638 new = stack_pointer_rtx, offset = -dynamic_offset;
3639 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3640 new = stack_pointer_rtx, offset = -out_arg_offset;
3641 else if (SET_DEST (x) == virtual_cfa_rtx)
3642 new = arg_pointer_rtx, offset = -cfa_offset;
3643
3644 if (new)
3645 {
3646 rtx src = SET_SRC (x);
3647
3648 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3649
3650 /* The only valid sources here are PLUS or REG. Just do
3651 the simplest possible thing to handle them. */
3652 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3653 abort ();
3654
3655 start_sequence ();
3656 if (GET_CODE (src) != REG)
3657 temp = force_operand (src, NULL_RTX);
3658 else
3659 temp = src;
3660 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3661 seq = get_insns ();
3662 end_sequence ();
3663
3664 emit_insns_before (seq, object);
3665 SET_DEST (x) = new;
3666
3667 if (! validate_change (object, &SET_SRC (x), temp, 0)
3668 || ! extra_insns)
3669 abort ();
3670
3671 return 1;
3672 }
3673
3674 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3675 loc = &SET_SRC (x);
3676 goto restart;
3677
3678 case PLUS:
3679 /* Handle special case of virtual register plus constant. */
3680 if (CONSTANT_P (XEXP (x, 1)))
3681 {
3682 rtx old, new_offset;
3683
3684 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3685 if (GET_CODE (XEXP (x, 0)) == PLUS)
3686 {
3687 rtx inner = XEXP (XEXP (x, 0), 0);
3688
3689 if (inner == virtual_incoming_args_rtx)
3690 new = arg_pointer_rtx, offset = in_arg_offset;
3691 else if (inner == virtual_stack_vars_rtx)
3692 new = frame_pointer_rtx, offset = var_offset;
3693 else if (inner == virtual_stack_dynamic_rtx)
3694 new = stack_pointer_rtx, offset = dynamic_offset;
3695 else if (inner == virtual_outgoing_args_rtx)
3696 new = stack_pointer_rtx, offset = out_arg_offset;
3697 else if (inner == virtual_cfa_rtx)
3698 new = arg_pointer_rtx, offset = cfa_offset;
3699 else
3700 {
3701 loc = &XEXP (x, 0);
3702 goto restart;
3703 }
3704
3705 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3706 extra_insns);
3707 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3708 }
3709
3710 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3711 new = arg_pointer_rtx, offset = in_arg_offset;
3712 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3713 new = frame_pointer_rtx, offset = var_offset;
3714 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3715 new = stack_pointer_rtx, offset = dynamic_offset;
3716 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3717 new = stack_pointer_rtx, offset = out_arg_offset;
3718 else if (XEXP (x, 0) == virtual_cfa_rtx)
3719 new = arg_pointer_rtx, offset = cfa_offset;
3720 else
3721 {
3722 /* We know the second operand is a constant. Unless the
3723             first operand is a REG (which has already been checked),
3724 it needs to be checked. */
3725 if (GET_CODE (XEXP (x, 0)) != REG)
3726 {
3727 loc = &XEXP (x, 0);
3728 goto restart;
3729 }
3730 return 1;
3731 }
3732
3733 new_offset = plus_constant (XEXP (x, 1), offset);
3734
3735 /* If the new constant is zero, try to replace the sum with just
3736 the register. */
3737 if (new_offset == const0_rtx
3738 && validate_change (object, loc, new, 0))
3739 return 1;
3740
3741 /* Next try to replace the register and new offset.
3742          There are two changes to validate here, and we can't assume that
3743          when the old offset equals the new one, just changing the register
3744          will yield a valid insn.  In the interests of a little efficiency,
3745 however, we only call validate change once (we don't queue up the
3746 changes and then call apply_change_group). */
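/* Schematic example: rewriting (plus (reg virtual-stack-vars) (const_int 4))
   as (plus (reg frame-pointer) (const_int 4 + var_offset)) changes both
   operands of the PLUS, and each change must still be acceptable to the
   insn that contains it.  */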
3747
3748 old = XEXP (x, 0);
3749 if (offset == 0
3750 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3751 : (XEXP (x, 0) = new,
3752 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3753 {
3754 if (! extra_insns)
3755 {
3756 XEXP (x, 0) = old;
3757 return 0;
3758 }
3759
3760 /* Otherwise copy the new constant into a register and replace
3761            the constant with that register.  */
3762 temp = gen_reg_rtx (Pmode);
3763 XEXP (x, 0) = new;
3764 if (validate_change (object, &XEXP (x, 1), temp, 0))
3765 emit_insn_before (gen_move_insn (temp, new_offset), object);
3766 else
3767 {
3768 /* If that didn't work, replace this expression with a
3769 register containing the sum. */
3770
3771 XEXP (x, 0) = old;
3772 new = gen_rtx_PLUS (Pmode, new, new_offset);
3773
3774 start_sequence ();
3775 temp = force_operand (new, NULL_RTX);
3776 seq = get_insns ();
3777 end_sequence ();
3778
3779 emit_insns_before (seq, object);
3780 if (! validate_change (object, loc, temp, 0)
3781 && ! validate_replace_rtx (x, temp, object))
3782 abort ();
3783 }
3784 }
3785
3786 return 1;
3787 }
3788
3789 /* Fall through to generic two-operand expression case. */
3790 case EXPR_LIST:
3791 case CALL:
3792 case COMPARE:
3793 case MINUS:
3794 case MULT:
3795 case DIV: case UDIV:
3796 case MOD: case UMOD:
3797 case AND: case IOR: case XOR:
3798 case ROTATERT: case ROTATE:
3799 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3800 case NE: case EQ:
3801 case GE: case GT: case GEU: case GTU:
3802 case LE: case LT: case LEU: case LTU:
3803 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3804 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3805 loc = &XEXP (x, 0);
3806 goto restart;
3807
3808 case MEM:
3809 /* Most cases of MEM that convert to valid addresses have already been
3810 handled by our scan of decls. The only special handling we
3811 need here is to make a copy of the rtx to ensure it isn't being
3812 shared if we have to change it to a pseudo.
3813
3814 If the rtx is a simple reference to an address via a virtual register,
3815 it can potentially be shared. In such cases, first try to make it
3816 a valid address, which can also be shared. Otherwise, copy it and
3817 proceed normally.
3818
3819 First check for common cases that need no processing. These are
3820 usually due to instantiation already being done on a previous instance
3821 of a shared rtx. */
3822
3823 temp = XEXP (x, 0);
3824 if (CONSTANT_ADDRESS_P (temp)
3825 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3826 || temp == arg_pointer_rtx
3827 #endif
3828 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3829 || temp == hard_frame_pointer_rtx
3830 #endif
3831 || temp == frame_pointer_rtx)
3832 return 1;
3833
3834 if (GET_CODE (temp) == PLUS
3835 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3836 && (XEXP (temp, 0) == frame_pointer_rtx
3837 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3838 || XEXP (temp, 0) == hard_frame_pointer_rtx
3839 #endif
3840 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3841 || XEXP (temp, 0) == arg_pointer_rtx
3842 #endif
3843 ))
3844 return 1;
3845
3846 if (temp == virtual_stack_vars_rtx
3847 || temp == virtual_incoming_args_rtx
3848 || (GET_CODE (temp) == PLUS
3849 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3850 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3851 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3852 {
3853 /* This MEM may be shared. If the substitution can be done without
3854 the need to generate new pseudos, we want to do it in place
3855 so all copies of the shared rtx benefit. The call below will
3856 only make substitutions if the resulting address is still
3857 valid.
3858
3859 Note that we cannot pass X as the object in the recursive call
3860 since the insn being processed may not allow all valid
3861          addresses.  However, if we were not passed an object, we can
3862 only modify X without copying it if X will have a valid
3863 address.
3864
3865 ??? Also note that this can still lose if OBJECT is an insn that
3866          has fewer restrictions on an address than some other insn.
3867 In that case, we will modify the shared address. This case
3868 doesn't seem very likely, though. One case where this could
3869 happen is in the case of a USE or CLOBBER reference, but we
3870 take care of that below. */
3871
3872 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3873 object ? object : x, 0))
3874 return 1;
3875
3876 /* Otherwise make a copy and process that copy. We copy the entire
3877 RTL expression since it might be a PLUS which could also be
3878 shared. */
3879 *loc = x = copy_rtx (x);
3880 }
3881
3882 /* Fall through to generic unary operation case. */
3883 case SUBREG:
3884 case STRICT_LOW_PART:
3885 case NEG: case NOT:
3886 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3887 case SIGN_EXTEND: case ZERO_EXTEND:
3888 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3889 case FLOAT: case FIX:
3890 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3891 case ABS:
3892 case SQRT:
3893 case FFS:
3894      /* These cases either have just one operand or we know that we need not
3895 check the rest of the operands. */
3896 loc = &XEXP (x, 0);
3897 goto restart;
3898
3899 case USE:
3900 case CLOBBER:
3901 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3902         go ahead and make the invalid change, but do it to a copy.  For a REG,
3903 just make the recursive call, since there's no chance of a problem. */
3904
3905 if ((GET_CODE (XEXP (x, 0)) == MEM
3906 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3907 0))
3908 || (GET_CODE (XEXP (x, 0)) == REG
3909 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3910 return 1;
3911
3912 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3913 loc = &XEXP (x, 0);
3914 goto restart;
3915
3916 case REG:
3917 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3918 in front of this insn and substitute the temporary. */
3919 if (x == virtual_incoming_args_rtx)
3920 new = arg_pointer_rtx, offset = in_arg_offset;
3921 else if (x == virtual_stack_vars_rtx)
3922 new = frame_pointer_rtx, offset = var_offset;
3923 else if (x == virtual_stack_dynamic_rtx)
3924 new = stack_pointer_rtx, offset = dynamic_offset;
3925 else if (x == virtual_outgoing_args_rtx)
3926 new = stack_pointer_rtx, offset = out_arg_offset;
3927 else if (x == virtual_cfa_rtx)
3928 new = arg_pointer_rtx, offset = cfa_offset;
3929
3930 if (new)
3931 {
3932 temp = plus_constant (new, offset);
3933 if (!validate_change (object, loc, temp, 0))
3934 {
3935 if (! extra_insns)
3936 return 0;
3937
3938 start_sequence ();
3939 temp = force_operand (temp, NULL_RTX);
3940 seq = get_insns ();
3941 end_sequence ();
3942
3943 emit_insns_before (seq, object);
3944 if (! validate_change (object, loc, temp, 0)
3945 && ! validate_replace_rtx (x, temp, object))
3946 abort ();
3947 }
3948 }
3949
3950 return 1;
3951
3952 case ADDRESSOF:
3953 if (GET_CODE (XEXP (x, 0)) == REG)
3954 return 1;
3955
3956 else if (GET_CODE (XEXP (x, 0)) == MEM)
3957 {
3958          /* If we have an (addressof (mem ..)), do any instantiation inside
3959 since we know we'll be making the inside valid when we finally
3960 remove the ADDRESSOF. */
3961 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3962 return 1;
3963 }
3964 break;
3965
3966 default:
3967 break;
3968 }
3969
3970 /* Scan all subexpressions. */
3971 fmt = GET_RTX_FORMAT (code);
3972 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3973 if (*fmt == 'e')
3974 {
3975 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3976 return 0;
3977 }
3978 else if (*fmt == 'E')
3979 for (j = 0; j < XVECLEN (x, i); j++)
3980 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3981 extra_insns))
3982 return 0;
3983
3984 return 1;
3985 }
3986 \f
3987 /* Optimization: assuming this function does not receive nonlocal gotos,
3988 delete the handlers for such, as well as the insns to establish
3989 and disestablish them. */
3990
3991 static void
3992 delete_handlers ()
3993 {
3994 rtx insn;
3995 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3996 {
3997 /* Delete the handler by turning off the flag that would
3998 prevent jump_optimize from deleting it.
3999 Also permit deletion of the nonlocal labels themselves
4000 if nothing local refers to them. */
4001 if (GET_CODE (insn) == CODE_LABEL)
4002 {
4003 tree t, last_t;
4004
4005 LABEL_PRESERVE_P (insn) = 0;
4006
4007 /* Remove it from the nonlocal_label list, to avoid confusing
4008 flow. */
4009 for (t = nonlocal_labels, last_t = 0; t;
4010 last_t = t, t = TREE_CHAIN (t))
4011 if (DECL_RTL (TREE_VALUE (t)) == insn)
4012 break;
4013 if (t)
4014 {
4015 if (! last_t)
4016 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4017 else
4018 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4019 }
4020 }
4021 if (GET_CODE (insn) == INSN)
4022 {
4023 int can_delete = 0;
4024 rtx t;
4025 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4026 if (reg_mentioned_p (t, PATTERN (insn)))
4027 {
4028 can_delete = 1;
4029 break;
4030 }
4031 if (can_delete
4032 || (nonlocal_goto_stack_level != 0
4033 && reg_mentioned_p (nonlocal_goto_stack_level,
4034 PATTERN (insn))))
4035 delete_insn (insn);
4036 }
4037 }
4038 }
4039 \f
4040 int
4041 max_parm_reg_num ()
4042 {
4043 return max_parm_reg;
4044 }
4045
4046 /* Return the first insn following those generated by `assign_parms'. */
4047
4048 rtx
4049 get_first_nonparm_insn ()
4050 {
4051 if (last_parm_insn)
4052 return NEXT_INSN (last_parm_insn);
4053 return get_insns ();
4054 }
4055
4056 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4057 Crash if there is none. */
4058
4059 rtx
4060 get_first_block_beg ()
4061 {
4062 register rtx searcher;
4063 register rtx insn = get_first_nonparm_insn ();
4064
4065 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4066 if (GET_CODE (searcher) == NOTE
4067 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4068 return searcher;
4069
4070 abort (); /* Invalid call to this function. (See comments above.) */
4071 return NULL_RTX;
4072 }
4073
4074 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4075 This means a type for which function calls must pass an address to the
4076 function or get an address back from the function.
4077 EXP may be a type node or an expression (whose type is tested). */
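/* Illustrative examples (target-dependent, so only a sketch): an aggregate
   that is TREE_ADDRESSABLE, or any aggregate when -fpcc-struct-return is in
   effect, yields 1; a scalar returned in a call-clobbered hard register
   yields 0.  The final answer comes from RETURN_IN_MEMORY and
   hard_function_value below.  */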
4078
4079 int
4080 aggregate_value_p (exp)
4081 tree exp;
4082 {
4083 int i, regno, nregs;
4084 rtx reg;
4085
4086 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4087
4088 if (TREE_CODE (type) == VOID_TYPE)
4089 return 0;
4090 if (RETURN_IN_MEMORY (type))
4091 return 1;
4092 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4093 and thus can't be returned in registers. */
4094 if (TREE_ADDRESSABLE (type))
4095 return 1;
4096 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4097 return 1;
4098 /* Make sure we have suitable call-clobbered regs to return
4099 the value in; if not, we must return it in memory. */
4100 reg = hard_function_value (type, 0, 0);
4101
4102 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4103 it is OK. */
4104 if (GET_CODE (reg) != REG)
4105 return 0;
4106
4107 regno = REGNO (reg);
4108 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4109 for (i = 0; i < nregs; i++)
4110 if (! call_used_regs[regno + i])
4111 return 1;
4112 return 0;
4113 }
4114 \f
4115 /* Assign RTL expressions to the function's parameters.
4116 This may involve copying them into registers and using
4117 those registers as the RTL for them. */
4118
4119 void
4120 assign_parms (fndecl)
4121 tree fndecl;
4122 {
4123 register tree parm;
4124 register rtx entry_parm = 0;
4125 register rtx stack_parm = 0;
4126 CUMULATIVE_ARGS args_so_far;
4127 enum machine_mode promoted_mode, passed_mode;
4128 enum machine_mode nominal_mode, promoted_nominal_mode;
4129 int unsignedp;
4130 /* Total space needed so far for args on the stack,
4131 given as a constant and a tree-expression. */
4132 struct args_size stack_args_size;
4133 tree fntype = TREE_TYPE (fndecl);
4134 tree fnargs = DECL_ARGUMENTS (fndecl);
4135 /* This is used for the arg pointer when referring to stack args. */
4136 rtx internal_arg_pointer;
4137   /* This is a dummy PARM_DECL that we use for the function result if
4138 the function returns a structure. */
4139 tree function_result_decl = 0;
4140 #ifdef SETUP_INCOMING_VARARGS
4141 int varargs_setup = 0;
4142 #endif
4143 rtx conversion_insns = 0;
4144 struct args_size alignment_pad;
4145
4146 /* Nonzero if the last arg is named `__builtin_va_alist',
4147 which is used on some machines for old-fashioned non-ANSI varargs.h;
4148 this should be stuck onto the stack as if it had arrived there. */
4149 int hide_last_arg
4150 = (current_function_varargs
4151 && fnargs
4152 && (parm = tree_last (fnargs)) != 0
4153 && DECL_NAME (parm)
4154 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4155 "__builtin_va_alist")));
4156
4157 /* Nonzero if function takes extra anonymous args.
4158 This means the last named arg must be on the stack
4159 right before the anonymous ones. */
4160 int stdarg
4161 = (TYPE_ARG_TYPES (fntype) != 0
4162 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4163 != void_type_node));
4164
4165 current_function_stdarg = stdarg;
4166
4167 /* If the reg that the virtual arg pointer will be translated into is
4168 not a fixed reg or is the stack pointer, make a copy of the virtual
4169 arg pointer, and address parms via the copy. The frame pointer is
4170 considered fixed even though it is not marked as such.
4171
4172 The second time through, simply use ap to avoid generating rtx. */
4173
4174 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4175 || ! (fixed_regs[ARG_POINTER_REGNUM]
4176 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4177 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4178 else
4179 internal_arg_pointer = virtual_incoming_args_rtx;
4180 current_function_internal_arg_pointer = internal_arg_pointer;
4181
4182 stack_args_size.constant = 0;
4183 stack_args_size.var = 0;
4184
4185 /* If struct value address is treated as the first argument, make it so. */
4186 if (aggregate_value_p (DECL_RESULT (fndecl))
4187 && ! current_function_returns_pcc_struct
4188 && struct_value_incoming_rtx == 0)
4189 {
4190 tree type = build_pointer_type (TREE_TYPE (fntype));
4191
4192 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4193
4194 DECL_ARG_TYPE (function_result_decl) = type;
4195 TREE_CHAIN (function_result_decl) = fnargs;
4196 fnargs = function_result_decl;
4197 }
4198
4199 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4200 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4201
4202 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4203 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4204 #else
4205 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4206 #endif
4207
4208 /* We haven't yet found an argument that we must push and pretend the
4209 caller did. */
4210 current_function_pretend_args_size = 0;
4211
4212 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4213 {
4214 struct args_size stack_offset;
4215 struct args_size arg_size;
4216 int passed_pointer = 0;
4217 int did_conversion = 0;
4218 tree passed_type = DECL_ARG_TYPE (parm);
4219 tree nominal_type = TREE_TYPE (parm);
4220 int pretend_named;
4221
4222 /* Set LAST_NAMED if this is last named arg before some
4223 anonymous args. */
4224 int last_named = ((TREE_CHAIN (parm) == 0
4225 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4226 && (stdarg || current_function_varargs));
4227 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4228 most machines, if this is a varargs/stdarg function, then we treat
4229 the last named arg as if it were anonymous too. */
4230 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4231
4232 if (TREE_TYPE (parm) == error_mark_node
4233 /* This can happen after weird syntax errors
4234 or if an enum type is defined among the parms. */
4235 || TREE_CODE (parm) != PARM_DECL
4236 || passed_type == NULL)
4237 {
4238 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4239 = gen_rtx_MEM (BLKmode, const0_rtx);
4240 TREE_USED (parm) = 1;
4241 continue;
4242 }
4243
4244       /* For a varargs.h function, save info about regs and stack space
4245 used by the individual args, not including the va_alist arg. */
4246 if (hide_last_arg && last_named)
4247 current_function_args_info = args_so_far;
4248
4249 /* Find mode of arg as it is passed, and mode of arg
4250 as it should be during execution of this function. */
4251 passed_mode = TYPE_MODE (passed_type);
4252 nominal_mode = TYPE_MODE (nominal_type);
4253
4254 /* If the parm's mode is VOID, its value doesn't matter,
4255          so avoid the usual things like emit_move_insn that could crash.  */
4256 if (nominal_mode == VOIDmode)
4257 {
4258 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4259 continue;
4260 }
4261
4262 /* If the parm is to be passed as a transparent union, use the
4263 type of the first field for the tests below. We have already
4264 verified that the modes are the same. */
4265 if (DECL_TRANSPARENT_UNION (parm)
4266 || (TREE_CODE (passed_type) == UNION_TYPE
4267 && TYPE_TRANSPARENT_UNION (passed_type)))
4268 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4269
4270 /* See if this arg was passed by invisible reference. It is if
4271 it is an object whose size depends on the contents of the
4272 object itself or if the machine requires these objects be passed
4273 that way. */
4274
4275 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4276 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4277 || TREE_ADDRESSABLE (passed_type)
4278 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4279 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4280 passed_type, named_arg)
4281 #endif
4282 )
4283 {
4284 passed_type = nominal_type = build_pointer_type (passed_type);
4285 passed_pointer = 1;
4286 passed_mode = nominal_mode = Pmode;
4287 }
4288
4289 promoted_mode = passed_mode;
4290
4291 #ifdef PROMOTE_FUNCTION_ARGS
4292       /* Compute the mode to which the arg is actually extended.  */
4293 unsignedp = TREE_UNSIGNED (passed_type);
4294 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4295 #endif
4296
4297 /* Let machine desc say which reg (if any) the parm arrives in.
4298 0 means it arrives on the stack. */
4299 #ifdef FUNCTION_INCOMING_ARG
4300 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4301 passed_type, named_arg);
4302 #else
4303 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4304 passed_type, named_arg);
4305 #endif
4306
4307 if (entry_parm == 0)
4308 promoted_mode = passed_mode;
4309
4310 #ifdef SETUP_INCOMING_VARARGS
4311 /* If this is the last named parameter, do any required setup for
4312 varargs or stdargs. We need to know about the case of this being an
4313 addressable type, in which case we skip the registers it
4314 would have arrived in.
4315
4316 For stdargs, LAST_NAMED will be set for two parameters, the one that
4317 is actually the last named, and the dummy parameter. We only
4318 want to do this action once.
4319
4320 Also, indicate when RTL generation is to be suppressed. */
4321 if (last_named && !varargs_setup)
4322 {
4323 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4324 current_function_pretend_args_size, 0);
4325 varargs_setup = 1;
4326 }
4327 #endif
4328
4329 /* Determine parm's home in the stack,
4330 in case it arrives in the stack or we should pretend it did.
4331
4332 Compute the stack position and rtx where the argument arrives
4333 and its size.
4334
4335 There is one complexity here: If this was a parameter that would
4336 have been passed in registers, but wasn't only because it is
4337 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4338 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4339 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4340 0 as it was the previous time. */
4341
4342 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4343 locate_and_pad_parm (promoted_mode, passed_type,
4344 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4345 1,
4346 #else
4347 #ifdef FUNCTION_INCOMING_ARG
4348 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4349 passed_type,
4350 pretend_named) != 0,
4351 #else
4352 FUNCTION_ARG (args_so_far, promoted_mode,
4353 passed_type,
4354 pretend_named) != 0,
4355 #endif
4356 #endif
4357 fndecl, &stack_args_size, &stack_offset, &arg_size,
4358 &alignment_pad);
4359
4360 {
4361 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4362
4363 if (offset_rtx == const0_rtx)
4364 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4365 else
4366 stack_parm = gen_rtx_MEM (promoted_mode,
4367 gen_rtx_PLUS (Pmode,
4368 internal_arg_pointer,
4369 offset_rtx));
4370
4371 set_mem_attributes (stack_parm, parm, 1);
4372 }
4373
4374 /* If this parameter was passed both in registers and in the stack,
4375 use the copy on the stack. */
4376 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4377 entry_parm = 0;
4378
4379 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4380 /* If this parm was passed part in regs and part in memory,
4381 pretend it arrived entirely in memory
4382 by pushing the register-part onto the stack.
4383
4384 In the special case of a DImode or DFmode that is split,
4385 we could put it together in a pseudoreg directly,
4386 but for now that's not worth bothering with. */
4387
4388 if (entry_parm)
4389 {
4390 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4391 passed_type, named_arg);
4392
4393 if (nregs > 0)
4394 {
4395 current_function_pretend_args_size
4396 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4397 / (PARM_BOUNDARY / BITS_PER_UNIT)
4398 * (PARM_BOUNDARY / BITS_PER_UNIT));
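/* Worked example (hypothetical numbers): with nregs == 3, UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 64, the 12 bytes of register data are rounded up to a
   16-byte pretend-args block.  */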
4399
4400 /* Handle calls that pass values in multiple non-contiguous
4401 locations. The Irix 6 ABI has examples of this. */
4402 if (GET_CODE (entry_parm) == PARALLEL)
4403 emit_group_store (validize_mem (stack_parm), entry_parm,
4404 int_size_in_bytes (TREE_TYPE (parm)),
4405 TYPE_ALIGN (TREE_TYPE (parm)));
4406
4407 else
4408 move_block_from_reg (REGNO (entry_parm),
4409 validize_mem (stack_parm), nregs,
4410 int_size_in_bytes (TREE_TYPE (parm)));
4411
4412 entry_parm = stack_parm;
4413 }
4414 }
4415 #endif
4416
4417 /* If we didn't decide this parm came in a register,
4418 by default it came on the stack. */
4419 if (entry_parm == 0)
4420 entry_parm = stack_parm;
4421
4422 /* Record permanently how this parm was passed. */
4423 DECL_INCOMING_RTL (parm) = entry_parm;
4424
4425 /* If there is actually space on the stack for this parm,
4426 count it in stack_args_size; otherwise set stack_parm to 0
4427 to indicate there is no preallocated stack slot for the parm. */
4428
4429 if (entry_parm == stack_parm
4430 || (GET_CODE (entry_parm) == PARALLEL
4431 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4432 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4433 /* On some machines, even if a parm value arrives in a register
4434 there is still an (uninitialized) stack slot allocated for it.
4435
4436 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4437 whether this parameter already has a stack slot allocated,
4438 because an arg block exists only if current_function_args_size
4439 is larger than some threshold, and we haven't calculated that
4440 yet. So, for now, we just assume that stack slots never exist
4441 in this case. */
4442 || REG_PARM_STACK_SPACE (fndecl) > 0
4443 #endif
4444 )
4445 {
4446 stack_args_size.constant += arg_size.constant;
4447 if (arg_size.var)
4448 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4449 }
4450 else
4451 /* No stack slot was pushed for this parm. */
4452 stack_parm = 0;
4453
4454 /* Update info on where next arg arrives in registers. */
4455
4456 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4457 passed_type, named_arg);
4458
4459 /* If we can't trust the parm stack slot to be aligned enough
4460 for its ultimate type, don't use that slot after entry.
4461 We'll make another stack slot, if we need one. */
4462 {
4463 unsigned int thisparm_boundary
4464 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4465
4466 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4467 stack_parm = 0;
4468 }
4469
4470 /* If parm was passed in memory, and we need to convert it on entry,
4471 don't store it back in that same slot. */
4472 if (entry_parm != 0
4473 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4474 stack_parm = 0;
4475
4476 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4477 in the mode in which it arrives.
4478 STACK_PARM is an RTX for a stack slot where the parameter can live
4479 during the function (in case we want to put it there).
4480 STACK_PARM is 0 if no stack slot was pushed for it.
4481
4482 Now output code if necessary to convert ENTRY_PARM to
4483 the type in which this function declares it,
4484 and store that result in an appropriate place,
4485 which may be a pseudo reg, may be STACK_PARM,
4486 or may be a local stack slot if STACK_PARM is 0.
4487
4488 Set DECL_RTL to that place. */
4489
4490 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4491 {
4492 /* If a BLKmode arrives in registers, copy it to a stack slot.
4493 Handle calls that pass values in multiple non-contiguous
4494 locations. The Irix 6 ABI has examples of this. */
4495 if (GET_CODE (entry_parm) == REG
4496 || GET_CODE (entry_parm) == PARALLEL)
4497 {
4498 int size_stored
4499 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4500 UNITS_PER_WORD);
4501
4502 /* Note that we will be storing an integral number of words.
4503 So we have to be careful to ensure that we allocate an
4504 integral number of words. We do this below in the
4505 assign_stack_local if space was not allocated in the argument
4506 list. If it was, this will not work if PARM_BOUNDARY is not
4507 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4508 if it becomes a problem. */
4509
4510 if (stack_parm == 0)
4511 {
4512 stack_parm
4513 = assign_stack_local (GET_MODE (entry_parm),
4514 size_stored, 0);
4515 set_mem_attributes (stack_parm, parm, 1);
4516 }
4517
4518 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4519 abort ();
4520
4521 /* Handle calls that pass values in multiple non-contiguous
4522 locations. The Irix 6 ABI has examples of this. */
4523 if (GET_CODE (entry_parm) == PARALLEL)
4524 emit_group_store (validize_mem (stack_parm), entry_parm,
4525 int_size_in_bytes (TREE_TYPE (parm)),
4526 TYPE_ALIGN (TREE_TYPE (parm)));
4527 else
4528 move_block_from_reg (REGNO (entry_parm),
4529 validize_mem (stack_parm),
4530 size_stored / UNITS_PER_WORD,
4531 int_size_in_bytes (TREE_TYPE (parm)));
4532 }
4533 DECL_RTL (parm) = stack_parm;
4534 }
4535 else if (! ((! optimize
4536 && ! DECL_REGISTER (parm)
4537 && ! DECL_INLINE (fndecl))
4538 /* layout_decl may set this. */
4539 || TREE_ADDRESSABLE (parm)
4540 || TREE_SIDE_EFFECTS (parm)
4541                    /* If -ffloat-store is specified, don't put explicit
4542 float variables into registers. */
4543 || (flag_float_store
4544 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4545 /* Always assign pseudo to structure return or item passed
4546 by invisible reference. */
4547 || passed_pointer || parm == function_result_decl)
4548 {
4549 /* Store the parm in a pseudoregister during the function, but we
4550 may need to do it in a wider mode. */
4551
4552 register rtx parmreg;
4553 unsigned int regno, regnoi = 0, regnor = 0;
4554
4555 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4556
4557 promoted_nominal_mode
4558 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4559
4560 parmreg = gen_reg_rtx (promoted_nominal_mode);
4561 mark_user_reg (parmreg);
4562
4563 /* If this was an item that we received a pointer to, set DECL_RTL
4564 appropriately. */
4565 if (passed_pointer)
4566 {
4567 DECL_RTL (parm)
4568 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4569 set_mem_attributes (DECL_RTL (parm), parm, 1);
4570 }
4571 else
4572 DECL_RTL (parm) = parmreg;
4573
4574 /* Copy the value into the register. */
4575 if (nominal_mode != passed_mode
4576 || promoted_nominal_mode != promoted_mode)
4577 {
4578 int save_tree_used;
4579 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4580 mode, by the caller. We now have to convert it to
4581 NOMINAL_MODE, if different. However, PARMREG may be in
4582 a different mode than NOMINAL_MODE if it is being stored
4583 promoted.
4584
4585 If ENTRY_PARM is a hard register, it might be in a register
4586 not valid for operating in its mode (e.g., an odd-numbered
4587 register for a DFmode). In that case, moves are the only
4588 thing valid, so we can't do a convert from there. This
4589             occurs when the calling sequence allows such misaligned
4590 usages.
4591
4592 In addition, the conversion may involve a call, which could
4593 clobber parameters which haven't been copied to pseudo
4594 registers yet. Therefore, we must first copy the parm to
4595 a pseudo reg here, and save the conversion until after all
4596 parameters have been moved. */
4597
4598 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4599
4600 emit_move_insn (tempreg, validize_mem (entry_parm));
4601
4602 push_to_sequence (conversion_insns);
4603 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4604
4605 /* TREE_USED gets set erroneously during expand_assignment. */
4606 save_tree_used = TREE_USED (parm);
4607 expand_assignment (parm,
4608 make_tree (nominal_type, tempreg), 0, 0);
4609 TREE_USED (parm) = save_tree_used;
4610 conversion_insns = get_insns ();
4611 did_conversion = 1;
4612 end_sequence ();
4613 }
4614 else
4615 emit_move_insn (parmreg, validize_mem (entry_parm));
4616
4617 /* If we were passed a pointer but the actual value
4618 can safely live in a register, put it in one. */
4619 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4620 && ! ((! optimize
4621 && ! DECL_REGISTER (parm)
4622 && ! DECL_INLINE (fndecl))
4623 /* layout_decl may set this. */
4624 || TREE_ADDRESSABLE (parm)
4625 || TREE_SIDE_EFFECTS (parm)
4626                      /* If -ffloat-store is specified, don't put explicit
4627 float variables into registers. */
4628 || (flag_float_store
4629 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4630 {
4631 /* We can't use nominal_mode, because it will have been set to
4632 Pmode above. We must use the actual mode of the parm. */
4633 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4634 mark_user_reg (parmreg);
4635 emit_move_insn (parmreg, DECL_RTL (parm));
4636 DECL_RTL (parm) = parmreg;
4637 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4638 now the parm. */
4639 stack_parm = 0;
4640 }
4641 #ifdef FUNCTION_ARG_CALLEE_COPIES
4642 /* If we are passed an arg by reference and it is our responsibility
4643 to make a copy, do it now.
4644         PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4645 original argument, so we must recreate them in the call to
4646 FUNCTION_ARG_CALLEE_COPIES. */
4647       /* ??? Later add code to skip the copy when the argument isn't
4648          modified.  */
4649
4650 else if (passed_pointer
4651 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4652 TYPE_MODE (DECL_ARG_TYPE (parm)),
4653 DECL_ARG_TYPE (parm),
4654 named_arg)
4655 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4656 {
4657 rtx copy;
4658 tree type = DECL_ARG_TYPE (parm);
4659
4660 /* This sequence may involve a library call perhaps clobbering
4661 registers that haven't been copied to pseudos yet. */
4662
4663 push_to_sequence (conversion_insns);
4664
4665 if (!COMPLETE_TYPE_P (type)
4666 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4667 /* This is a variable sized object. */
4668 copy = gen_rtx_MEM (BLKmode,
4669 allocate_dynamic_stack_space
4670 (expr_size (parm), NULL_RTX,
4671 TYPE_ALIGN (type)));
4672 else
4673 copy = assign_stack_temp (TYPE_MODE (type),
4674 int_size_in_bytes (type), 1);
4675 set_mem_attributes (copy, parm, 1);
4676
4677 store_expr (parm, copy, 0);
4678 emit_move_insn (parmreg, XEXP (copy, 0));
4679 if (current_function_check_memory_usage)
4680 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4681 XEXP (copy, 0), Pmode,
4682 GEN_INT (int_size_in_bytes (type)),
4683 TYPE_MODE (sizetype),
4684 GEN_INT (MEMORY_USE_RW),
4685 TYPE_MODE (integer_type_node));
4686 conversion_insns = get_insns ();
4687 did_conversion = 1;
4688 end_sequence ();
4689 }
4690 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4691
4692 /* In any case, record the parm's desired stack location
4693 in case we later discover it must live in the stack.
4694
4695 If it is a COMPLEX value, store the stack location for both
4696 halves. */
4697
4698 if (GET_CODE (parmreg) == CONCAT)
4699 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4700 else
4701 regno = REGNO (parmreg);
4702
4703 if (regno >= max_parm_reg)
4704 {
4705 rtx *new;
4706 int old_max_parm_reg = max_parm_reg;
4707
4708 /* It's slow to expand this one register at a time,
4709 but it's also rare and we need max_parm_reg to be
4710 precisely correct. */
4711 max_parm_reg = regno + 1;
4712 new = (rtx *) xrealloc (parm_reg_stack_loc,
4713 max_parm_reg * sizeof (rtx));
4714 bzero ((char *) (new + old_max_parm_reg),
4715 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4716 parm_reg_stack_loc = new;
4717 }
4718
4719 if (GET_CODE (parmreg) == CONCAT)
4720 {
4721 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4722
4723 regnor = REGNO (gen_realpart (submode, parmreg));
4724 regnoi = REGNO (gen_imagpart (submode, parmreg));
4725
4726 if (stack_parm != 0)
4727 {
4728 parm_reg_stack_loc[regnor]
4729 = gen_realpart (submode, stack_parm);
4730 parm_reg_stack_loc[regnoi]
4731 = gen_imagpart (submode, stack_parm);
4732 }
4733 else
4734 {
4735 parm_reg_stack_loc[regnor] = 0;
4736 parm_reg_stack_loc[regnoi] = 0;
4737 }
4738 }
4739 else
4740 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4741
4742 /* Mark the register as eliminable if we did no conversion
4743 and it was copied from memory at a fixed offset,
4744 and the arg pointer was not copied to a pseudo-reg.
4745 If the arg pointer is a pseudo reg or the offset formed
4746 an invalid address, such memory-equivalences
4747 as we make here would screw up life analysis for it. */
4748 if (nominal_mode == passed_mode
4749 && ! did_conversion
4750 && stack_parm != 0
4751 && GET_CODE (stack_parm) == MEM
4752 && stack_offset.var == 0
4753 && reg_mentioned_p (virtual_incoming_args_rtx,
4754 XEXP (stack_parm, 0)))
4755 {
4756 rtx linsn = get_last_insn ();
4757 rtx sinsn, set;
4758
4759 /* Mark complex types separately. */
4760 if (GET_CODE (parmreg) == CONCAT)
4761 /* Scan backwards for the set of the real and
4762 imaginary parts. */
4763 for (sinsn = linsn; sinsn != 0;
4764 sinsn = prev_nonnote_insn (sinsn))
4765 {
4766 set = single_set (sinsn);
4767 if (set != 0
4768 && SET_DEST (set) == regno_reg_rtx [regnoi])
4769 REG_NOTES (sinsn)
4770 = gen_rtx_EXPR_LIST (REG_EQUIV,
4771 parm_reg_stack_loc[regnoi],
4772 REG_NOTES (sinsn));
4773 else if (set != 0
4774 && SET_DEST (set) == regno_reg_rtx [regnor])
4775 REG_NOTES (sinsn)
4776 = gen_rtx_EXPR_LIST (REG_EQUIV,
4777 parm_reg_stack_loc[regnor],
4778 REG_NOTES (sinsn));
4779 }
4780 else if ((set = single_set (linsn)) != 0
4781 && SET_DEST (set) == parmreg)
4782 REG_NOTES (linsn)
4783 = gen_rtx_EXPR_LIST (REG_EQUIV,
4784 stack_parm, REG_NOTES (linsn));
4785 }
4786
4787 /* For pointer data type, suggest pointer register. */
4788 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4789 mark_reg_pointer (parmreg,
4790 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4791
4792 }
4793 else
4794 {
4795 /* Value must be stored in the stack slot STACK_PARM
4796 during function execution. */
4797
4798 if (promoted_mode != nominal_mode)
4799 {
4800 /* Conversion is required. */
4801 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4802
4803 emit_move_insn (tempreg, validize_mem (entry_parm));
4804
4805 push_to_sequence (conversion_insns);
4806 entry_parm = convert_to_mode (nominal_mode, tempreg,
4807 TREE_UNSIGNED (TREE_TYPE (parm)));
4808 if (stack_parm)
4809 {
4810 /* ??? This may need a big-endian conversion on sparc64. */
4811 stack_parm = change_address (stack_parm, nominal_mode,
4812 NULL_RTX);
4813 }
4814 conversion_insns = get_insns ();
4815 did_conversion = 1;
4816 end_sequence ();
4817 }
4818
4819 if (entry_parm != stack_parm)
4820 {
4821 if (stack_parm == 0)
4822 {
4823 stack_parm
4824 = assign_stack_local (GET_MODE (entry_parm),
4825 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4826 set_mem_attributes (stack_parm, parm, 1);
4827 }
4828
4829 if (promoted_mode != nominal_mode)
4830 {
4831 push_to_sequence (conversion_insns);
4832 emit_move_insn (validize_mem (stack_parm),
4833 validize_mem (entry_parm));
4834 conversion_insns = get_insns ();
4835 end_sequence ();
4836 }
4837 else
4838 emit_move_insn (validize_mem (stack_parm),
4839 validize_mem (entry_parm));
4840 }
4841 if (current_function_check_memory_usage)
4842 {
4843 push_to_sequence (conversion_insns);
4844 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4845 XEXP (stack_parm, 0), Pmode,
4846 GEN_INT (GET_MODE_SIZE (GET_MODE
4847 (entry_parm))),
4848 TYPE_MODE (sizetype),
4849 GEN_INT (MEMORY_USE_RW),
4850 TYPE_MODE (integer_type_node));
4851
4852 conversion_insns = get_insns ();
4853 end_sequence ();
4854 }
4855 DECL_RTL (parm) = stack_parm;
4856 }
4857
4858 /* If this "parameter" was the place where we are receiving the
4859 function's incoming structure pointer, set up the result. */
4860 if (parm == function_result_decl)
4861 {
4862 tree result = DECL_RESULT (fndecl);
4863
4864 DECL_RTL (result)
4865 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4866
4867 set_mem_attributes (DECL_RTL (result), result, 1);
4868 }
4869 }
4870
4871 /* Output all parameter conversion instructions (possibly including calls)
4872 now that all parameters have been copied out of hard registers. */
4873 emit_insns (conversion_insns);
4874
4875 last_parm_insn = get_last_insn ();
4876
4877 current_function_args_size = stack_args_size.constant;
4878
4879 /* Adjust function incoming argument size for alignment and
4880 minimum length. */
4881
4882 #ifdef REG_PARM_STACK_SPACE
4883 #ifndef MAYBE_REG_PARM_STACK_SPACE
4884 current_function_args_size = MAX (current_function_args_size,
4885 REG_PARM_STACK_SPACE (fndecl));
4886 #endif
4887 #endif
4888
4889 #ifdef STACK_BOUNDARY
4890 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4891
4892 current_function_args_size
4893 = ((current_function_args_size + STACK_BYTES - 1)
4894 / STACK_BYTES) * STACK_BYTES;
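/* Illustration (hypothetical numbers): with STACK_BOUNDARY == 64, STACK_BYTES
   is 8, so an args size of 20 bytes is rounded up to 24.  */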
4895 #endif
4896
4897 #ifdef ARGS_GROW_DOWNWARD
4898 current_function_arg_offset_rtx
4899 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4900 : expand_expr (size_diffop (stack_args_size.var,
4901 size_int (-stack_args_size.constant)),
4902 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4903 #else
4904 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4905 #endif
4906
4907 /* See how many bytes, if any, of its args a function should try to pop
4908 on return. */
4909
4910 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4911 current_function_args_size);
4912
4913   /* For a stdarg.h function, save info about
4914 regs and stack space used by the named args. */
4915
4916 if (!hide_last_arg)
4917 current_function_args_info = args_so_far;
4918
4919 /* Set the rtx used for the function return value. Put this in its
4920 own variable so any optimizers that need this information don't have
4921 to include tree.h. Do this here so it gets done when an inlined
4922 function gets output. */
4923
4924 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4925 }
4926 \f
4927 /* Indicate whether REGNO is an incoming argument to the current function
4928 that was promoted to a wider mode. If so, return the RTX for the
4929 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4930 that REGNO is promoted from and whether the promotion was signed or
4931 unsigned. */
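/* For example (a sketch, assuming a target that defines PROMOTE_FUNCTION_ARGS
   and widens sub-word arguments to word mode): a `signed char' parameter
   arrives in a word-mode hard register, and for that register number this
   function returns the incoming REG, sets *PMODE to QImode and *PUNSIGNEDP
   to 0.  */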
4932
4933 #ifdef PROMOTE_FUNCTION_ARGS
4934
4935 rtx
4936 promoted_input_arg (regno, pmode, punsignedp)
4937 unsigned int regno;
4938 enum machine_mode *pmode;
4939 int *punsignedp;
4940 {
4941 tree arg;
4942
4943 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4944 arg = TREE_CHAIN (arg))
4945 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4946 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4947 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4948 {
4949 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4950 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4951
4952 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4953 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4954 && mode != DECL_MODE (arg))
4955 {
4956 *pmode = DECL_MODE (arg);
4957 *punsignedp = unsignedp;
4958 return DECL_INCOMING_RTL (arg);
4959 }
4960 }
4961
4962 return 0;
4963 }
4964
4965 #endif
4966 \f
4967 /* Compute the size and offset from the start of the stacked arguments for a
4968 parm passed in mode PASSED_MODE and with type TYPE.
4969
4970 INITIAL_OFFSET_PTR points to the current offset into the stacked
4971 arguments.
4972
4973 The starting offset and size for this parm are returned in *OFFSET_PTR
4974 and *ARG_SIZE_PTR, respectively.
4975
4976 IN_REGS is non-zero if the argument will be passed in registers. It will
4977 never be set if REG_PARM_STACK_SPACE is not defined.
4978
4979 FNDECL is the function in which the argument was defined.
4980
4981 There are two types of rounding that are done. The first, controlled by
4982 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4983 list to be aligned to the specific boundary (in bits). This rounding
4984 affects the initial and starting offsets, but not the argument size.
4985
4986 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4987 optionally rounds the size of the parm to PARM_BOUNDARY. The
4988 initial offset is not affected by this rounding, while the size always
4989 is and the starting offset may be. */
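/* A hypothetical illustration of the two roundings on a target where args
   grow upward, FUNCTION_ARG_BOUNDARY returns 64 and PARM_BOUNDARY is 32:
   an initial offset of 4 bytes is first padded to 8, the parm is placed
   there, and a 6-byte parm has its size rounded up to 8, so the next
   argument starts at offset 16.  */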
4990
4991 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
4992    initial_offset_ptr is positive because locate_and_pad_parm's
4993    callers pass in the total size of args so far as
4994    initial_offset_ptr.  arg_size_ptr is always positive.  */
4995
4996 void
4997 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4998 initial_offset_ptr, offset_ptr, arg_size_ptr,
4999 alignment_pad)
5000 enum machine_mode passed_mode;
5001 tree type;
5002 int in_regs ATTRIBUTE_UNUSED;
5003 tree fndecl ATTRIBUTE_UNUSED;
5004 struct args_size *initial_offset_ptr;
5005 struct args_size *offset_ptr;
5006 struct args_size *arg_size_ptr;
5007 struct args_size *alignment_pad;
5008
5009 {
5010 tree sizetree
5011 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5012 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5013 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5014
5015 #ifdef REG_PARM_STACK_SPACE
5016 /* If we have found a stack parm before we reach the end of the
5017 area reserved for registers, skip that area. */
5018 if (! in_regs)
5019 {
5020 int reg_parm_stack_space = 0;
5021
5022 #ifdef MAYBE_REG_PARM_STACK_SPACE
5023 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5024 #else
5025 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5026 #endif
5027 if (reg_parm_stack_space > 0)
5028 {
5029 if (initial_offset_ptr->var)
5030 {
5031 initial_offset_ptr->var
5032 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5033 ssize_int (reg_parm_stack_space));
5034 initial_offset_ptr->constant = 0;
5035 }
5036 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5037 initial_offset_ptr->constant = reg_parm_stack_space;
5038 }
5039 }
5040 #endif /* REG_PARM_STACK_SPACE */
5041
5042 arg_size_ptr->var = 0;
5043 arg_size_ptr->constant = 0;
5044
5045 #ifdef ARGS_GROW_DOWNWARD
5046 if (initial_offset_ptr->var)
5047 {
5048 offset_ptr->constant = 0;
5049 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5050 initial_offset_ptr->var);
5051 }
5052 else
5053 {
5054 offset_ptr->constant = -initial_offset_ptr->constant;
5055 offset_ptr->var = 0;
5056 }
5057 if (where_pad != none
5058 && (TREE_CODE (sizetree) != INTEGER_CST
5059 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5060 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5061 SUB_PARM_SIZE (*offset_ptr, sizetree);
5062 if (where_pad != downward)
5063 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5064 if (initial_offset_ptr->var)
5065 arg_size_ptr->var = size_binop (MINUS_EXPR,
5066 size_binop (MINUS_EXPR,
5067 ssize_int (0),
5068 initial_offset_ptr->var),
5069 offset_ptr->var);
5070
5071 else
5072 arg_size_ptr->constant = (-initial_offset_ptr->constant
5073 - offset_ptr->constant);
5074
5075 #else /* !ARGS_GROW_DOWNWARD */
5076 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5077 *offset_ptr = *initial_offset_ptr;
5078
5079 #ifdef PUSH_ROUNDING
5080 if (passed_mode != BLKmode)
5081 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5082 #endif
5083
5084   /* Pad_below needs the pre-rounded size to know how much to pad below,
5085 so this must be done before rounding up. */
5086 if (where_pad == downward
5087 /* However, BLKmode args passed in regs have their padding done elsewhere.
5088 The stack slot must be able to hold the entire register. */
5089 && !(in_regs && passed_mode == BLKmode))
5090 pad_below (offset_ptr, passed_mode, sizetree);
5091
5092 if (where_pad != none
5093 && (TREE_CODE (sizetree) != INTEGER_CST
5094 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5095 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5096
5097 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5098 #endif /* ARGS_GROW_DOWNWARD */
5099 }
5100
5101 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5102 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
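/* For example (illustrative only): with BOUNDARY == 64, a constant offset of
   20 bytes becomes CEIL_ROUND (20, 8) == 24; an ARGS_GROW_DOWNWARD target
   uses FLOOR_ROUND instead and would get 16.  When BOUNDARY exceeds both
   PARM_BOUNDARY and STACK_BOUNDARY, the amount added is also recorded in
   *ALIGNMENT_PAD.  */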
5103
5104 static void
5105 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5106 struct args_size *offset_ptr;
5107 int boundary;
5108 struct args_size *alignment_pad;
5109 {
5110 tree save_var = NULL_TREE;
5111 HOST_WIDE_INT save_constant = 0;
5112
5113 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5114
5115 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5116 {
5117 save_var = offset_ptr->var;
5118 save_constant = offset_ptr->constant;
5119 }
5120
5121 alignment_pad->var = NULL_TREE;
5122 alignment_pad->constant = 0;
5123
5124 if (boundary > BITS_PER_UNIT)
5125 {
5126 if (offset_ptr->var)
5127 {
5128 offset_ptr->var =
5129 #ifdef ARGS_GROW_DOWNWARD
5130 round_down
5131 #else
5132 round_up
5133 #endif
5134 (ARGS_SIZE_TREE (*offset_ptr),
5135 boundary / BITS_PER_UNIT);
5136 offset_ptr->constant = 0; /*?*/
5137 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5138 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5139 save_var);
5140 }
5141 else
5142 {
5143 offset_ptr->constant =
5144 #ifdef ARGS_GROW_DOWNWARD
5145 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5146 #else
5147 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5148 #endif
5149 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5150 alignment_pad->constant = offset_ptr->constant - save_constant;
5151 }
5152 }
5153 }
5154
5155 #ifndef ARGS_GROW_DOWNWARD
5156 static void
5157 pad_below (offset_ptr, passed_mode, sizetree)
5158 struct args_size *offset_ptr;
5159 enum machine_mode passed_mode;
5160 tree sizetree;
5161 {
5162 if (passed_mode != BLKmode)
5163 {
5164 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5165 offset_ptr->constant
5166 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5167 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5168 - GET_MODE_SIZE (passed_mode));
5169 }
5170 else
5171 {
5172 if (TREE_CODE (sizetree) != INTEGER_CST
5173 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5174 {
5175 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5176 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5177 /* Add it in. */
5178 ADD_PARM_SIZE (*offset_ptr, s2);
5179 SUB_PARM_SIZE (*offset_ptr, sizetree);
5180 }
5181 }
5182 }
5183 #endif
5184 \f
5185 /* Walk the tree of blocks describing the binding levels within a function
5186 and warn about uninitialized variables.
5187 This is done after calling flow_analysis and before global_alloc
5188 clobbers the pseudo-regs to hard regs. */
5189
5190 void
5191 uninitialized_vars_warning (block)
5192 tree block;
5193 {
5194 register tree decl, sub;
5195 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5196 {
5197 if (warn_uninitialized
5198 && TREE_CODE (decl) == VAR_DECL
5199        /* These warnings are unreliable for aggregates
5200 because assigning the fields one by one can fail to convince
5201 flow.c that the entire aggregate was initialized.
5202 Unions are troublesome because members may be shorter. */
5203 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5204 && DECL_RTL (decl) != 0
5205 && GET_CODE (DECL_RTL (decl)) == REG
5206 /* Global optimizations can make it difficult to determine if a
5207 particular variable has been initialized. However, a VAR_DECL
5208 with a nonzero DECL_INITIAL had an initializer, so do not
5209 claim it is potentially uninitialized.
5210
5211 We do not care about the actual value in DECL_INITIAL, so we do
5212 not worry that it may be a dangling pointer. */
5213 && DECL_INITIAL (decl) == NULL_TREE
5214 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5215 warning_with_decl (decl,
5216 "`%s' might be used uninitialized in this function");
5217 if (extra_warnings
5218 && TREE_CODE (decl) == VAR_DECL
5219 && DECL_RTL (decl) != 0
5220 && GET_CODE (DECL_RTL (decl)) == REG
5221 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5222 warning_with_decl (decl,
5223 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5224 }
5225 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5226 uninitialized_vars_warning (sub);
5227 }
5228
5229 /* Do the appropriate part of uninitialized_vars_warning
5230 but for arguments instead of local variables. */
5231
5232 void
5233 setjmp_args_warning ()
5234 {
5235 register tree decl;
5236 for (decl = DECL_ARGUMENTS (current_function_decl);
5237 decl; decl = TREE_CHAIN (decl))
5238 if (DECL_RTL (decl) != 0
5239 && GET_CODE (DECL_RTL (decl)) == REG
5240 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5241 warning_with_decl (decl,
5242 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5243 }
5244
5245 /* If this function calls setjmp, put all vars into the stack
5246 unless they were declared `register'. */
5247
5248 void
5249 setjmp_protect (block)
5250 tree block;
5251 {
5252 register tree decl, sub;
5253 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5254 if ((TREE_CODE (decl) == VAR_DECL
5255 || TREE_CODE (decl) == PARM_DECL)
5256 && DECL_RTL (decl) != 0
5257 && (GET_CODE (DECL_RTL (decl)) == REG
5258 || (GET_CODE (DECL_RTL (decl)) == MEM
5259 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5260 /* If this variable came from an inline function, it must be
5261 that its life doesn't overlap the setjmp. If there was a
5262 setjmp in the function, it would already be in memory. We
5263 must exclude such variables because their DECL_RTL might be
5264 set to strange things such as virtual_stack_vars_rtx. */
5265 && ! DECL_FROM_INLINE (decl)
5266 && (
5267 #ifdef NON_SAVING_SETJMP
5268 /* If longjmp doesn't restore the registers,
5269 don't put anything in them. */
5270 NON_SAVING_SETJMP
5271 ||
5272 #endif
5273 ! DECL_REGISTER (decl)))
5274 put_var_into_stack (decl);
5275 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5276 setjmp_protect (sub);
5277 }
5278 \f
5279 /* Like the previous function, but for args instead of local variables. */
5280
5281 void
5282 setjmp_protect_args ()
5283 {
5284 register tree decl;
5285 for (decl = DECL_ARGUMENTS (current_function_decl);
5286 decl; decl = TREE_CHAIN (decl))
5287 if ((TREE_CODE (decl) == VAR_DECL
5288 || TREE_CODE (decl) == PARM_DECL)
5289 && DECL_RTL (decl) != 0
5290 && (GET_CODE (DECL_RTL (decl)) == REG
5291 || (GET_CODE (DECL_RTL (decl)) == MEM
5292 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5293 && (
5294 /* If longjmp doesn't restore the registers,
5295 don't put anything in them. */
5296 #ifdef NON_SAVING_SETJMP
5297 NON_SAVING_SETJMP
5298 ||
5299 #endif
5300 ! DECL_REGISTER (decl)))
5301 put_var_into_stack (decl);
5302 }
5303 \f
5304 /* Return the context-pointer register corresponding to DECL,
5305 or 0 if it does not need one. */
5306
5307 rtx
5308 lookup_static_chain (decl)
5309 tree decl;
5310 {
5311 tree context = decl_function_context (decl);
5312 tree link;
5313
5314 if (context == 0
5315 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5316 return 0;
5317
5318 /* We treat inline_function_decl as an alias for the current function
5319 because that is the inline function whose vars, types, etc.
5320 are being merged into the current function.
5321 See expand_inline_function. */
5322 if (context == current_function_decl || context == inline_function_decl)
5323 return virtual_stack_vars_rtx;
5324
5325 for (link = context_display; link; link = TREE_CHAIN (link))
5326 if (TREE_PURPOSE (link) == context)
5327 return RTL_EXPR_RTL (TREE_VALUE (link));
5328
5329 abort ();
5330 }
5331 \f
5332 /* Convert a stack slot address ADDR for variable VAR
5333 (from a containing function)
5334 into an address valid in this function (using a static chain). */
5335
5336 rtx
5337 fix_lexical_addr (addr, var)
5338 rtx addr;
5339 tree var;
5340 {
5341 rtx basereg;
5342 HOST_WIDE_INT displacement;
5343 tree context = decl_function_context (var);
5344 struct function *fp;
5345 rtx base = 0;
5346
5347 /* If this is the present function, we need not do anything. */
5348 if (context == current_function_decl || context == inline_function_decl)
5349 return addr;
5350
5351 for (fp = outer_function_chain; fp; fp = fp->next)
5352 if (fp->decl == context)
5353 break;
5354
5355 if (fp == 0)
5356 abort ();
5357
5358 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5359 addr = XEXP (XEXP (addr, 0), 0);
5360
5361 /* Decode given address as base reg plus displacement. */
5362 if (GET_CODE (addr) == REG)
5363 basereg = addr, displacement = 0;
5364 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5365 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5366 else
5367 abort ();
5368
5369 /* We accept vars reached via the containing function's
5370 incoming arg pointer and via its stack variables pointer. */
5371 if (basereg == fp->internal_arg_pointer)
5372 {
5373 /* If reached via arg pointer, get the arg pointer value
5374 out of that function's stack frame.
5375
5376 There are two cases: If a separate ap is needed, allocate a
5377 slot in the outer function for it and dereference it that way.
5378 This is correct even if the real ap is actually a pseudo.
5379 Otherwise, just adjust the offset from the frame pointer to
5380 compensate. */
5381
5382 #ifdef NEED_SEPARATE_AP
5383 rtx addr;
5384
5385 if (fp->x_arg_pointer_save_area == 0)
5386 fp->x_arg_pointer_save_area
5387 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5388
5389 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5390 addr = memory_address (Pmode, addr);
5391
5392 base = gen_rtx_MEM (Pmode, addr);
5393 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5394 base = copy_to_reg (base);
5395 #else
5396 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5397 base = lookup_static_chain (var);
5398 #endif
5399 }
5400
5401 else if (basereg == virtual_stack_vars_rtx)
5402 {
5403 /* This is the same code as lookup_static_chain, duplicated here to
5404 avoid an extra call to decl_function_context. */
5405 tree link;
5406
5407 for (link = context_display; link; link = TREE_CHAIN (link))
5408 if (TREE_PURPOSE (link) == context)
5409 {
5410 base = RTL_EXPR_RTL (TREE_VALUE (link));
5411 break;
5412 }
5413 }
5414
5415 if (base == 0)
5416 abort ();
5417
5418 /* Use same offset, relative to appropriate static chain or argument
5419 pointer. */
5420 return plus_constant (base, displacement);
5421 }
5422 \f
5423 /* Return the address of the trampoline for entering nested fn FUNCTION.
5424 If necessary, allocate a trampoline (in the stack frame)
5425 and emit rtl to initialize its contents (at entry to this function). */
5426
5427 rtx
5428 trampoline_address (function)
5429 tree function;
5430 {
5431 tree link;
5432 tree rtlexp;
5433 rtx tramp;
5434 struct function *fp;
5435 tree fn_context;
5436
5437 /* Find an existing trampoline and return it. */
5438 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5439 if (TREE_PURPOSE (link) == function)
5440 return
5441 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5442
5443 for (fp = outer_function_chain; fp; fp = fp->next)
5444 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5445 if (TREE_PURPOSE (link) == function)
5446 {
5447 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5448 function);
5449 return round_trampoline_addr (tramp);
5450 }
5451
5452 /* None exists; we must make one. */
5453
5454 /* Find the `struct function' for the function containing FUNCTION. */
5455 fp = 0;
5456 fn_context = decl_function_context (function);
5457 if (fn_context != current_function_decl
5458 && fn_context != inline_function_decl)
5459 for (fp = outer_function_chain; fp; fp = fp->next)
5460 if (fp->decl == fn_context)
5461 break;
5462
5463 /* Allocate run-time space for this trampoline
5464 (usually in the defining function's stack frame). */
5465 #ifdef ALLOCATE_TRAMPOLINE
5466 tramp = ALLOCATE_TRAMPOLINE (fp);
5467 #else
5468 /* If rounding needed, allocate extra space
5469 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5470 #ifdef TRAMPOLINE_ALIGNMENT
5471 #define TRAMPOLINE_REAL_SIZE \
5472 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5473 #else
5474 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5475 #endif
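/* Purely illustrative numbers: if TRAMPOLINE_SIZE were 10 and
   TRAMPOLINE_ALIGNMENT 32 bits, TRAMPOLINE_REAL_SIZE would be
   10 + 4 - 1 = 13, so rounding the slot's address up to a 4-byte boundary
   still leaves 10 usable bytes.  */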
5476 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5477 fp ? fp : cfun);
5478 #endif
5479
5480 /* Record the trampoline for reuse and note it for later initialization
5481 by expand_function_end. */
5482 if (fp != 0)
5483 {
5484 push_obstacks (fp->function_maybepermanent_obstack,
5485 fp->function_maybepermanent_obstack);
5486 rtlexp = make_node (RTL_EXPR);
5487 RTL_EXPR_RTL (rtlexp) = tramp;
5488 fp->x_trampoline_list = tree_cons (function, rtlexp,
5489 fp->x_trampoline_list);
5490 pop_obstacks ();
5491 }
5492 else
5493 {
5494 /* Make the RTL_EXPR node temporary, not momentary, so that the
5495 trampoline_list doesn't become garbage. */
5496 int momentary = suspend_momentary ();
5497 rtlexp = make_node (RTL_EXPR);
5498 resume_momentary (momentary);
5499
5500 RTL_EXPR_RTL (rtlexp) = tramp;
5501 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5502 }
5503
5504 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5505 return round_trampoline_addr (tramp);
5506 }
5507
5508 /* Given a trampoline address,
5509 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5510
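/* Illustrative arithmetic only: with TRAMPOLINE_ALIGNMENT of 32 bits the
   RTL below computes (tramp + 3) & -4, so an address of 0x1003 is rounded
   up to 0x1004 while 0x1004 is left unchanged.  */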
5511 static rtx
5512 round_trampoline_addr (tramp)
5513 rtx tramp;
5514 {
5515 #ifdef TRAMPOLINE_ALIGNMENT
5516 /* Round address up to desired boundary. */
5517 rtx temp = gen_reg_rtx (Pmode);
5518 temp = expand_binop (Pmode, add_optab, tramp,
5519 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5520 temp, 0, OPTAB_LIB_WIDEN);
5521 tramp = expand_binop (Pmode, and_optab, temp,
5522 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5523 temp, 0, OPTAB_LIB_WIDEN);
5524 #endif
5525 return tramp;
5526 }
5527 \f
5528 /* Gather all this function's BLOCK nodes, including those chained
5529 onto the first block, into a vector in depth-first order.
5530 Also store in each NOTE for the beginning or end of a block
5531 the BLOCK node that it corresponds to.
5532 The BLOCK tree is taken from DECL_INITIAL of the current function,
5533 and the insn chain from get_insns (). */
5534
5535 void
5536 identify_blocks ()
5537 {
5538 int n_blocks;
5539 tree *block_vector, *last_block_vector;
5540 tree *block_stack;
5541 tree block = DECL_INITIAL (current_function_decl);
5542
5543 if (block == 0)
5544 return;
5545
5546 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5547 depth-first order. */
5548 block_vector = get_block_vector (block, &n_blocks);
5549 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5550
5551 last_block_vector = identify_blocks_1 (get_insns (),
5552 block_vector + 1,
5553 block_vector + n_blocks,
5554 block_stack);
5555
5556 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5557 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5558 if (0 && last_block_vector != block_vector + n_blocks)
5559 abort ();
5560
5561 free (block_vector);
5562 free (block_stack);
5563 }
5564
5565 /* Subroutine of identify_blocks. Do the block substitution on the
5566 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5567
5568 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5569 BLOCK_VECTOR is incremented for each block seen. */
5570
5571 static tree *
5572 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5573 rtx insns;
5574 tree *block_vector;
5575 tree *end_block_vector;
5576 tree *orig_block_stack;
5577 {
5578 rtx insn;
5579 tree *block_stack = orig_block_stack;
5580
5581 for (insn = insns; insn; insn = NEXT_INSN (insn))
5582 {
5583 if (GET_CODE (insn) == NOTE)
5584 {
5585 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5586 {
5587 tree b;
5588
5589 /* If there are more block notes than BLOCKs, something
5590 is badly wrong. */
5591 if (block_vector == end_block_vector)
5592 abort ();
5593
5594 b = *block_vector++;
5595 NOTE_BLOCK (insn) = b;
5596 *block_stack++ = b;
5597 }
5598 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5599 {
5600 /* If there are more NOTE_INSN_BLOCK_ENDs than
5601 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5602 if (block_stack == orig_block_stack)
5603 abort ();
5604
5605 NOTE_BLOCK (insn) = *--block_stack;
5606 }
5607 }
5608 else if (GET_CODE (insn) == CALL_INSN
5609 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5610 {
5611 rtx cp = PATTERN (insn);
5612
5613 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5614 end_block_vector, block_stack);
5615 if (XEXP (cp, 1))
5616 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5617 end_block_vector, block_stack);
5618 if (XEXP (cp, 2))
5619 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5620 end_block_vector, block_stack);
5621 }
5622 }
5623
5624 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5625 something is badly wrong. */
5626 if (block_stack != orig_block_stack)
5627 abort ();
5628
5629 return block_vector;
5630 }
5631
5632 /* Identify BLOCKs referenced by more than one
5633 NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
5634
5635 void
5636 reorder_blocks ()
5637 {
5638 tree block = DECL_INITIAL (current_function_decl);
5639 varray_type block_stack;
5640
5641 if (block == NULL_TREE)
5642 return;
5643
5644 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5645
5646 /* Prune the old trees away, so that they don't get in the way. */
5647 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5648 BLOCK_CHAIN (block) = NULL_TREE;
5649
5650 reorder_blocks_1 (get_insns (), block, &block_stack);
5651
5652 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5653
5654 VARRAY_FREE (block_stack);
5655 }
5656
5657 /* Helper function for reorder_blocks. Process the insn chain beginning
5658 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5659
5660 static void
5661 reorder_blocks_1 (insns, current_block, p_block_stack)
5662 rtx insns;
5663 tree current_block;
5664 varray_type *p_block_stack;
5665 {
5666 rtx insn;
5667
5668 for (insn = insns; insn; insn = NEXT_INSN (insn))
5669 {
5670 if (GET_CODE (insn) == NOTE)
5671 {
5672 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5673 {
5674 tree block = NOTE_BLOCK (insn);
5675 /* If we have seen this block before, copy it. */
5676 if (TREE_ASM_WRITTEN (block))
5677 {
5678 block = copy_node (block);
5679 NOTE_BLOCK (insn) = block;
5680 }
5681 BLOCK_SUBBLOCKS (block) = 0;
5682 TREE_ASM_WRITTEN (block) = 1;
5683 BLOCK_SUPERCONTEXT (block) = current_block;
5684 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5685 BLOCK_SUBBLOCKS (current_block) = block;
5686 current_block = block;
5687 VARRAY_PUSH_TREE (*p_block_stack, block);
5688 }
5689 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5690 {
5691 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5692 VARRAY_POP (*p_block_stack);
5693 BLOCK_SUBBLOCKS (current_block)
5694 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5695 current_block = BLOCK_SUPERCONTEXT (current_block);
5696 }
5697 }
5698 else if (GET_CODE (insn) == CALL_INSN
5699 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5700 {
5701 rtx cp = PATTERN (insn);
5702 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5703 if (XEXP (cp, 1))
5704 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5705 if (XEXP (cp, 2))
5706 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5707 }
5708 }
5709 }
5710
5711 /* Reverse the order of elements in the chain T of blocks,
5712 and return the new head of the chain (old last element). */
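/* For example, a chain B1 -> B2 -> B3 becomes B3 -> B2 -> B1, and B3 is
   returned as the new head.  */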
5713
5714 static tree
5715 blocks_nreverse (t)
5716 tree t;
5717 {
5718 register tree prev = 0, decl, next;
5719 for (decl = t; decl; decl = next)
5720 {
5721 next = BLOCK_CHAIN (decl);
5722 BLOCK_CHAIN (decl) = prev;
5723 prev = decl;
5724 }
5725 return prev;
5726 }
5727
5728 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5729 non-NULL, list them all into VECTOR, in a depth-first preorder
5730 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5731 blocks. */
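/* For example, a BLOCK with two subblocks, the first of which contains one
   nested subblock, yields a count of 4 (assuming BLOCK itself has no
   siblings on its BLOCK_CHAIN).  */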
5732
5733 static int
5734 all_blocks (block, vector)
5735 tree block;
5736 tree *vector;
5737 {
5738 int n_blocks = 0;
5739
5740 while (block)
5741 {
5742 TREE_ASM_WRITTEN (block) = 0;
5743
5744 /* Record this block. */
5745 if (vector)
5746 vector[n_blocks] = block;
5747
5748 ++n_blocks;
5749
5750 /* Record the subblocks, and their subblocks... */
5751 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5752 vector ? vector + n_blocks : 0);
5753 block = BLOCK_CHAIN (block);
5754 }
5755
5756 return n_blocks;
5757 }
5758
5759 /* Return a vector containing all the blocks rooted at BLOCK. The
5760 number of elements in the vector is stored in N_BLOCKS_P. The
5761 vector is dynamically allocated; it is the caller's responsibility
5762 to call `free' on the pointer returned. */
5763
5764 static tree *
5765 get_block_vector (block, n_blocks_p)
5766 tree block;
5767 int *n_blocks_p;
5768 {
5769 tree *block_vector;
5770
5771 *n_blocks_p = all_blocks (block, NULL);
5772 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5773 all_blocks (block, block_vector);
5774
5775 return block_vector;
5776 }
5777
5778 static int next_block_index = 2;
5779
5780 /* Set BLOCK_NUMBER for all the blocks in FN. */
5781
5782 void
5783 number_blocks (fn)
5784 tree fn;
5785 {
5786 int i;
5787 int n_blocks;
5788 tree *block_vector;
5789
5790 /* For SDB and XCOFF debugging output, we start numbering the blocks
5791 from 1 within each function, rather than keeping a running
5792 count. */
5793 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
5794 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
5795 next_block_index = 1;
5796 #endif
5797
5798 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
5799
5800 /* The top-level BLOCK isn't numbered at all. */
5801 for (i = 1; i < n_blocks; ++i)
5802 /* We number the blocks from two. */
5803 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
5804
5805 free (block_vector);
5806
5807 return;
5808 }
5809 \f
5810 /* Allocate a function structure and reset its contents to the defaults. */
5811 static void
5812 prepare_function_start ()
5813 {
5814 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5815
5816 /* Modify SEQ, a SEQUENCE that is part of the epilogue, so that it makes
5817 no modifications to the stack pointer. */
5818
5819 cse_not_expected = ! optimize;
5820
5821 /* Caller save not needed yet. */
5822 caller_save_needed = 0;
5823
5824 /* No stack slots have been made yet. */
5825 stack_slot_list = 0;
5826
5827 current_function_has_nonlocal_label = 0;
5828 current_function_has_nonlocal_goto = 0;
5829
5830 /* There is no stack slot for handling nonlocal gotos. */
5831 nonlocal_goto_handler_slots = 0;
5832 nonlocal_goto_stack_level = 0;
5833
5834 /* No labels have been declared for nonlocal use. */
5835 nonlocal_labels = 0;
5836 nonlocal_goto_handler_labels = 0;
5837
5838 /* No function calls so far in this function. */
5839 function_call_count = 0;
5840
5841 /* No parm regs have been allocated.
5842 (This is important for output_inline_function.) */
5843 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5844
5845 /* Initialize the RTL mechanism. */
5846 init_emit ();
5847
5848 /* Initialize the queue of pending postincrement and postdecrements,
5849 and some other info in expr.c. */
5850 init_expr ();
5851
5852 /* We haven't done register allocation yet. */
5853 reg_renumber = 0;
5854
5855 init_varasm_status (cfun);
5856
5857 /* Clear out data used for inlining. */
5858 cfun->inlinable = 0;
5859 cfun->original_decl_initial = 0;
5860 cfun->original_arg_vector = 0;
5861
5862 #ifdef STACK_BOUNDARY
5863 cfun->stack_alignment_needed = STACK_BOUNDARY;
5864 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5865 #else
5866 cfun->stack_alignment_needed = 0;
5867 cfun->preferred_stack_boundary = 0;
5868 #endif
5869
5870 /* Set if a call to setjmp is seen. */
5871 current_function_calls_setjmp = 0;
5872
5873 /* Set if a call to longjmp is seen. */
5874 current_function_calls_longjmp = 0;
5875
5876 current_function_calls_alloca = 0;
5877 current_function_contains_functions = 0;
5878 current_function_is_leaf = 0;
5879 current_function_nothrow = 0;
5880 current_function_sp_is_unchanging = 0;
5881 current_function_uses_only_leaf_regs = 0;
5882 current_function_has_computed_jump = 0;
5883 current_function_is_thunk = 0;
5884
5885 current_function_returns_pcc_struct = 0;
5886 current_function_returns_struct = 0;
5887 current_function_epilogue_delay_list = 0;
5888 current_function_uses_const_pool = 0;
5889 current_function_uses_pic_offset_table = 0;
5890 current_function_cannot_inline = 0;
5891
5892 /* We have not yet needed to make a label to jump to for tail-recursion. */
5893 tail_recursion_label = 0;
5894
5895 /* We haven't had a need to make a save area for ap yet. */
5896 arg_pointer_save_area = 0;
5897
5898 /* No stack slots allocated yet. */
5899 frame_offset = 0;
5900
5901 /* No SAVE_EXPRs in this function yet. */
5902 save_expr_regs = 0;
5903
5904 /* No RTL_EXPRs in this function yet. */
5905 rtl_expr_chain = 0;
5906
5907 /* Set up to allocate temporaries. */
5908 init_temp_slots ();
5909
5910 /* Indicate that we need to distinguish between the return value of the
5911 present function and the return value of a function being called. */
5912 rtx_equal_function_value_matters = 1;
5913
5914 /* Indicate that we have not instantiated virtual registers yet. */
5915 virtuals_instantiated = 0;
5916
5917 /* Indicate we have no need of a frame pointer yet. */
5918 frame_pointer_needed = 0;
5919
5920 /* By default assume not varargs or stdarg. */
5921 current_function_varargs = 0;
5922 current_function_stdarg = 0;
5923
5924 /* We haven't made any trampolines for this function yet. */
5925 trampoline_list = 0;
5926
5927 init_pending_stack_adjust ();
5928 inhibit_defer_pop = 0;
5929
5930 current_function_outgoing_args_size = 0;
5931
5932 if (init_lang_status)
5933 (*init_lang_status) (cfun);
5934 if (init_machine_status)
5935 (*init_machine_status) (cfun);
5936 }
5937
5938 /* Initialize the rtl expansion mechanism so that we can do simple things
5939 like generate sequences. This is used to provide a context during global
5940 initialization of some passes. */
5941 void
5942 init_dummy_function_start ()
5943 {
5944 prepare_function_start ();
5945 }
5946
5947 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5948 and initialize static variables for generating RTL for the statements
5949 of the function. */
5950
5951 void
5952 init_function_start (subr, filename, line)
5953 tree subr;
5954 const char *filename;
5955 int line;
5956 {
5957 prepare_function_start ();
5958
5959 /* Remember this function for later. */
5960 cfun->next_global = all_functions;
5961 all_functions = cfun;
5962
5963 current_function_name = (*decl_printable_name) (subr, 2);
5964 cfun->decl = subr;
5965
5966 /* Nonzero if this is a nested function that uses a static chain. */
5967
5968 current_function_needs_context
5969 = (decl_function_context (current_function_decl) != 0
5970 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5971
5972 /* Within function body, compute a type's size as soon as it is laid out. */
5973 immediate_size_expand++;
5974
5975 /* Prevent ever trying to delete the first instruction of a function.
5976 Also tell final how to output a linenum before the function prologue.
5977 Note linenums could be missing, e.g. when compiling a Java .class file. */
5978 if (line > 0)
5979 emit_line_note (filename, line);
5980
5981 /* Make sure first insn is a note even if we don't want linenums.
5982 This makes sure the first insn will never be deleted.
5983 Also, final expects a note to appear there. */
5984 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5985
5986 /* Set flags used by final.c. */
5987 if (aggregate_value_p (DECL_RESULT (subr)))
5988 {
5989 #ifdef PCC_STATIC_STRUCT_RETURN
5990 current_function_returns_pcc_struct = 1;
5991 #endif
5992 current_function_returns_struct = 1;
5993 }
5994
5995 /* Warn if this value is an aggregate type,
5996 regardless of which calling convention we are using for it. */
5997 if (warn_aggregate_return
5998 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5999 warning ("function returns an aggregate");
6000
6001 current_function_returns_pointer
6002 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6003 }
6004
6005 /* Make sure all values used by the optimization passes have sane
6006 defaults. */
6007 void
6008 init_function_for_compilation ()
6009 {
6010 reg_renumber = 0;
6011
6012 /* No prologue/epilogue insns yet. */
6013 VARRAY_GROW (prologue, 0);
6014 VARRAY_GROW (epilogue, 0);
6015 VARRAY_GROW (sibcall_epilogue, 0);
6016 }
6017
6018 /* Indicate that the current function uses extra args
6019 not explicitly mentioned in the argument list in any fashion. */
6020
6021 void
6022 mark_varargs ()
6023 {
6024 current_function_varargs = 1;
6025 }
6026
6027 /* Expand a call to __main at the beginning of a possible main function. */
6028
6029 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6030 #undef HAS_INIT_SECTION
6031 #define HAS_INIT_SECTION
6032 #endif
6033
6034 void
6035 expand_main_function ()
6036 {
6037 #if !defined (HAS_INIT_SECTION)
6038 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6039 VOIDmode, 0);
6040 #endif /* not HAS_INIT_SECTION */
6041 }
6042 \f
6043 extern struct obstack permanent_obstack;
6044
6045 /* Start the RTL for a new function, and set variables used for
6046 emitting RTL.
6047 SUBR is the FUNCTION_DECL node.
6048 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6049 the function's parameters, which must be run at any return statement. */
6050
6051 void
6052 expand_function_start (subr, parms_have_cleanups)
6053 tree subr;
6054 int parms_have_cleanups;
6055 {
6056 tree tem;
6057 rtx last_ptr = NULL_RTX;
6058
6059 /* Make sure volatile mem refs aren't considered
6060 valid operands of arithmetic insns. */
6061 init_recog_no_volatile ();
6062
6063 /* Set this before generating any memory accesses. */
6064 current_function_check_memory_usage
6065 = (flag_check_memory_usage
6066 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6067
6068 current_function_instrument_entry_exit
6069 = (flag_instrument_function_entry_exit
6070 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6071
6072 current_function_limit_stack
6073 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6074
6075 /* If function gets a static chain arg, store it in the stack frame.
6076 Do this first, so it gets the first stack slot offset. */
6077 if (current_function_needs_context)
6078 {
6079 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6080
6081 /* Delay copying static chain if it is not a register to avoid
6082 conflicts with regs used for parameters. */
6083 if (! SMALL_REGISTER_CLASSES
6084 || GET_CODE (static_chain_incoming_rtx) == REG)
6085 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6086 }
6087
6088 /* If the parameters of this function need cleaning up, get a label
6089 for the beginning of the code which executes those cleanups. This must
6090 be done before doing anything with return_label. */
6091 if (parms_have_cleanups)
6092 cleanup_label = gen_label_rtx ();
6093 else
6094 cleanup_label = 0;
6095
6096 /* Make the label for return statements to jump to, if this machine
6097 does not have a one-instruction return and uses an epilogue,
6098 or if it returns a structure, or if it has parm cleanups. */
6099 #ifdef HAVE_return
6100 if (cleanup_label == 0 && HAVE_return
6101 && ! current_function_instrument_entry_exit
6102 && ! current_function_returns_pcc_struct
6103 && ! (current_function_returns_struct && ! optimize))
6104 return_label = 0;
6105 else
6106 return_label = gen_label_rtx ();
6107 #else
6108 return_label = gen_label_rtx ();
6109 #endif
6110
6111 /* Initialize rtx used to return the value. */
6112 /* Do this before assign_parms so that we copy the struct value address
6113 before any library calls that assign parms might generate. */
6114
6115 /* Decide whether to return the value in memory or in a register. */
6116 if (aggregate_value_p (DECL_RESULT (subr)))
6117 {
6118 /* Returning something that won't go in a register. */
6119 register rtx value_address = 0;
6120
6121 #ifdef PCC_STATIC_STRUCT_RETURN
6122 if (current_function_returns_pcc_struct)
6123 {
6124 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6125 value_address = assemble_static_space (size);
6126 }
6127 else
6128 #endif
6129 {
6130 /* Expect to be passed the address of a place to store the value.
6131 If it is passed as an argument, assign_parms will take care of
6132 it. */
6133 if (struct_value_incoming_rtx)
6134 {
6135 value_address = gen_reg_rtx (Pmode);
6136 emit_move_insn (value_address, struct_value_incoming_rtx);
6137 }
6138 }
6139 if (value_address)
6140 {
6141 DECL_RTL (DECL_RESULT (subr))
6142 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6143 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6144 DECL_RESULT (subr), 1);
6145 }
6146 }
6147 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6148 /* If return mode is void, this decl rtl should not be used. */
6149 DECL_RTL (DECL_RESULT (subr)) = 0;
6150 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6151 {
6152 /* If function will end with cleanup code for parms,
6153 compute the return values into a pseudo reg,
6154 which we will copy into the true return register
6155 after the cleanups are done. */
6156
6157 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6158
6159 #ifdef PROMOTE_FUNCTION_RETURN
6160 tree type = TREE_TYPE (DECL_RESULT (subr));
6161 int unsignedp = TREE_UNSIGNED (type);
6162
6163 mode = promote_mode (type, mode, &unsignedp, 1);
6164 #endif
6165
6166 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6167 }
6168 else
6169 /* Scalar, returned in a register. */
6170 {
6171 DECL_RTL (DECL_RESULT (subr))
6172 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
6173
6174 /* Mark this reg as the function's return value. */
6175 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6176 {
6177 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6178 /* Needed because we may need to move this to memory
6179 in case it's a named return value whose address is taken. */
6180 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6181 }
6182 }
6183
6184 /* Initialize rtx for parameters and local variables.
6185 In some cases this requires emitting insns. */
6186
6187 assign_parms (subr);
6188
6189 /* Copy the static chain now if it wasn't a register. The delay is to
6190 avoid conflicts with the parameter passing registers. */
6191
6192 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6193 if (GET_CODE (static_chain_incoming_rtx) != REG)
6194 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6195
6196 /* The following was moved from init_function_start.
6197 The move is supposed to make sdb output more accurate. */
6198 /* Indicate the beginning of the function body,
6199 as opposed to parm setup. */
6200 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6201
6202 if (GET_CODE (get_last_insn ()) != NOTE)
6203 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6204 parm_birth_insn = get_last_insn ();
6205
6206 context_display = 0;
6207 if (current_function_needs_context)
6208 {
6209 /* Fetch static chain values for containing functions. */
6210 tem = decl_function_context (current_function_decl);
6211 /* Copy the static chain pointer into a pseudo. If we have
6212 small register classes, copy the value from memory if
6213 static_chain_incoming_rtx is a REG. */
6214 if (tem)
6215 {
6216 /* If the static chain originally came in a register, put it back
6217 there, then move it out in the next insn. The reason for
6218 this peculiar code is to satisfy function integration. */
6219 if (SMALL_REGISTER_CLASSES
6220 && GET_CODE (static_chain_incoming_rtx) == REG)
6221 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6222 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6223 }
6224
6225 while (tem)
6226 {
6227 tree rtlexp = make_node (RTL_EXPR);
6228
6229 RTL_EXPR_RTL (rtlexp) = last_ptr;
6230 context_display = tree_cons (tem, rtlexp, context_display);
6231 tem = decl_function_context (tem);
6232 if (tem == 0)
6233 break;
6234 /* Chain thru stack frames, assuming pointer to next lexical frame
6235 is found at the place we always store it. */
6236 #ifdef FRAME_GROWS_DOWNWARD
6237 last_ptr = plus_constant (last_ptr, -GET_MODE_SIZE (Pmode));
6238 #endif
6239 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6240 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6241 last_ptr = copy_to_reg (last_ptr);
6242
6243 /* If we are not optimizing, ensure that we know that this
6244 piece of context is live over the entire function. */
6245 if (! optimize)
6246 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6247 save_expr_regs);
6248 }
6249 }
6250
6251 if (current_function_instrument_entry_exit)
6252 {
6253 rtx fun = DECL_RTL (current_function_decl);
6254 if (GET_CODE (fun) == MEM)
6255 fun = XEXP (fun, 0);
6256 else
6257 abort ();
6258 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6259 fun, Pmode,
6260 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6261 0,
6262 hard_frame_pointer_rtx),
6263 Pmode);
6264 }
6265
6266 /* After the display initializations is where the tail-recursion label
6267 should go, if we end up needing one. Ensure we have a NOTE here
6268 since some things (like trampolines) get placed before this. */
6269 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6270
6271 /* Evaluate now the sizes of any types declared among the arguments. */
6272 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6273 {
6274 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6275 EXPAND_MEMORY_USE_BAD);
6276 /* Flush the queue in case this parameter declaration has
6277 side-effects. */
6278 emit_queue ();
6279 }
6280
6281 /* Make sure there is a line number after the function entry setup code. */
6282 force_next_line_note ();
6283 }
6284 \f
6285 /* Undo the effects of init_dummy_function_start. */
6286 void
6287 expand_dummy_function_end ()
6288 {
6289 /* End any sequences that failed to be closed due to syntax errors. */
6290 while (in_sequence_p ())
6291 end_sequence ();
6292
6293 /* Outside function body, can't compute type's actual size
6294 until next function's body starts. */
6295
6296 free_after_parsing (cfun);
6297 free_after_compilation (cfun);
6298 free (cfun);
6299 cfun = 0;
6300 }
6301
6302 /* Call DOIT for each hard register used as a return value from
6303 the current function. */
6304
6305 void
6306 diddle_return_value (doit, arg)
6307 void (*doit) PARAMS ((rtx, void *));
6308 void *arg;
6309 {
6310 rtx outgoing = current_function_return_rtx;
6311 int pcc;
6312
6313 if (! outgoing)
6314 return;
6315
6316 pcc = (current_function_returns_struct
6317 || current_function_returns_pcc_struct);
6318
6319 if ((GET_CODE (outgoing) == REG
6320 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6321 || pcc)
6322 {
6323 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6324
6325 /* A PCC-style return returns a pointer to the memory in which
6326 the structure is stored. */
6327 if (pcc)
6328 type = build_pointer_type (type);
6329
6330 #ifdef FUNCTION_OUTGOING_VALUE
6331 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6332 #else
6333 outgoing = FUNCTION_VALUE (type, current_function_decl);
6334 #endif
6335 /* If this is a BLKmode structure being returned in registers, then use
6336 the mode computed in expand_return. */
6337 if (GET_MODE (outgoing) == BLKmode)
6338 PUT_MODE (outgoing,
6339 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6340 REG_FUNCTION_VALUE_P (outgoing) = 1;
6341 }
6342
6343 if (GET_CODE (outgoing) == REG)
6344 (*doit) (outgoing, arg);
6345 else if (GET_CODE (outgoing) == PARALLEL)
6346 {
6347 int i;
6348
6349 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6350 {
6351 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6352
6353 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6354 (*doit) (x, arg);
6355 }
6356 }
6357 }
6358
6359 static void
6360 do_clobber_return_reg (reg, arg)
6361 rtx reg;
6362 void *arg ATTRIBUTE_UNUSED;
6363 {
6364 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6365 }
6366
6367 void
6368 clobber_return_register ()
6369 {
6370 diddle_return_value (do_clobber_return_reg, NULL);
6371 }
6372
6373 static void
6374 do_use_return_reg (reg, arg)
6375 rtx reg;
6376 void *arg ATTRIBUTE_UNUSED;
6377 {
6378 emit_insn (gen_rtx_USE (VOIDmode, reg));
6379 }
6380
6381 void
6382 use_return_register ()
6383 {
6384 diddle_return_value (do_use_return_reg, NULL);
6385 }
6386
6387 /* Generate RTL for the end of the current function.
6388 FILENAME and LINE are the current position in the source file.
6389
6390 It is up to language-specific callers to do cleanups for parameters--
6391 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6392
6393 void
6394 expand_function_end (filename, line, end_bindings)
6395 const char *filename;
6396 int line;
6397 int end_bindings;
6398 {
6399 tree link;
6400
6401 #ifdef TRAMPOLINE_TEMPLATE
6402 static rtx initial_trampoline;
6403 #endif
6404
6405 finish_expr_for_function ();
6406
6407 #ifdef NON_SAVING_SETJMP
6408 /* Don't put any variables in registers if we call setjmp
6409 on a machine that fails to restore the registers. */
6410 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6411 {
6412 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6413 setjmp_protect (DECL_INITIAL (current_function_decl));
6414
6415 setjmp_protect_args ();
6416 }
6417 #endif
6418
6419 /* Save the argument pointer if a save area was made for it. */
6420 if (arg_pointer_save_area)
6421 {
6422 /* arg_pointer_save_area may not be a valid memory address, so we
6423 have to check it and fix it if necessary. */
6424 rtx seq;
6425 start_sequence ();
6426 emit_move_insn (validize_mem (arg_pointer_save_area),
6427 virtual_incoming_args_rtx);
6428 seq = gen_sequence ();
6429 end_sequence ();
6430 emit_insn_before (seq, tail_recursion_reentry);
6431 }
6432
6433 /* Initialize any trampolines required by this function. */
6434 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6435 {
6436 tree function = TREE_PURPOSE (link);
6437 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6438 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6439 #ifdef TRAMPOLINE_TEMPLATE
6440 rtx blktramp;
6441 #endif
6442 rtx seq;
6443
6444 #ifdef TRAMPOLINE_TEMPLATE
6445 /* First make sure this compilation has a template for
6446 initializing trampolines. */
6447 if (initial_trampoline == 0)
6448 {
6449 end_temporary_allocation ();
6450 initial_trampoline
6451 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6452 resume_temporary_allocation ();
6453
6454 ggc_add_rtx_root (&initial_trampoline, 1);
6455 }
6456 #endif
6457
6458 /* Generate insns to initialize the trampoline. */
6459 start_sequence ();
6460 tramp = round_trampoline_addr (XEXP (tramp, 0));
6461 #ifdef TRAMPOLINE_TEMPLATE
6462 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6463 emit_block_move (blktramp, initial_trampoline,
6464 GEN_INT (TRAMPOLINE_SIZE),
6465 TRAMPOLINE_ALIGNMENT);
6466 #endif
6467 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6468 seq = get_insns ();
6469 end_sequence ();
6470
6471 /* Put those insns at entry to the containing function (this one). */
6472 emit_insns_before (seq, tail_recursion_reentry);
6473 }
6474
6475 /* If we are doing stack checking and this function makes calls,
6476 do a stack probe at the start of the function to ensure we have enough
6477 space for another stack frame. */
6478 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6479 {
6480 rtx insn, seq;
6481
6482 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6483 if (GET_CODE (insn) == CALL_INSN)
6484 {
6485 start_sequence ();
6486 probe_stack_range (STACK_CHECK_PROTECT,
6487 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6488 seq = get_insns ();
6489 end_sequence ();
6490 emit_insns_before (seq, tail_recursion_reentry);
6491 break;
6492 }
6493 }
6494
6495 /* Warn about unused parms if extra warnings were specified. */
6496 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6497 warning. WARN_UNUSED_PARAMETER is negative when set by
6498 -Wunused. */
6499 if (warn_unused_parameter > 0
6500 || (warn_unused_parameter < 0 && extra_warnings))
6501 {
6502 tree decl;
6503
6504 for (decl = DECL_ARGUMENTS (current_function_decl);
6505 decl; decl = TREE_CHAIN (decl))
6506 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6507 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6508 warning_with_decl (decl, "unused parameter `%s'");
6509 }
6510
6511 /* Delete handlers for nonlocal gotos if nothing uses them. */
6512 if (nonlocal_goto_handler_slots != 0
6513 && ! current_function_has_nonlocal_label)
6514 delete_handlers ();
6515
6516 /* End any sequences that failed to be closed due to syntax errors. */
6517 while (in_sequence_p ())
6518 end_sequence ();
6519
6520 /* Outside function body, can't compute type's actual size
6521 until next function's body starts. */
6522 immediate_size_expand--;
6523
6524 clear_pending_stack_adjust ();
6525 do_pending_stack_adjust ();
6526
6527 /* Mark the end of the function body.
6528 If control reaches this insn, the function can drop through
6529 without returning a value. */
6530 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6531
6532 /* Must mark the last line number note in the function, so that the test
6533 coverage code can avoid counting the last line twice. This just tells
6534 the code to ignore the immediately following line note, since there
6535 already exists a copy of this note somewhere above. This line number
6536 note is still needed for debugging though, so we can't delete it. */
6537 if (flag_test_coverage)
6538 emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
6539
6540 /* Output a linenumber for the end of the function.
6541 SDB depends on this. */
6542 emit_line_note_force (filename, line);
6543
6544 /* Output the label for the actual return from the function,
6545 if one is expected. This happens either because a function epilogue
6546 is used instead of a return instruction, or because a return was done
6547 with a goto in order to run local cleanups, or because of pcc-style
6548 structure returning. */
6549
6550 if (return_label)
6551 {
6552 /* Before the return label, clobber the return registers so that
6553 they are not propagated live to the rest of the function. This
6554 can only happen with functions that drop through; if there had
6555 been a return statement, there would have either been a return
6556 rtx, or a jump to the return label. */
6557 clobber_return_register ();
6558
6559 emit_label (return_label);
6560 }
6561
6562 /* C++ uses this. */
6563 if (end_bindings)
6564 expand_end_bindings (0, 0, 0);
6565
6566 /* Now handle any leftover exception regions that may have been
6567 created for the parameters. */
6568 {
6569 rtx last = get_last_insn ();
6570 rtx label;
6571
6572 expand_leftover_cleanups ();
6573
6574 /* If there are any catch_clauses remaining, output them now. */
6575 emit_insns (catch_clauses);
6576 catch_clauses = catch_clauses_last = NULL_RTX;
6577 /* If the above emitted any code, make sure we jump around it. */
6578 if (last != get_last_insn ())
6579 {
6580 label = gen_label_rtx ();
6581 last = emit_jump_insn_after (gen_jump (label), last);
6582 last = emit_barrier_after (last);
6583 emit_label (label);
6584 }
6585 }
6586
6587 if (current_function_instrument_entry_exit)
6588 {
6589 rtx fun = DECL_RTL (current_function_decl);
6590 if (GET_CODE (fun) == MEM)
6591 fun = XEXP (fun, 0);
6592 else
6593 abort ();
6594 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6595 fun, Pmode,
6596 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6597 0,
6598 hard_frame_pointer_rtx),
6599 Pmode);
6600 }
6601
6602 /* If we had calls to alloca, and this machine needs
6603 an accurate stack pointer to exit the function,
6604 insert some code to save and restore the stack pointer. */
6605 #ifdef EXIT_IGNORE_STACK
6606 if (! EXIT_IGNORE_STACK)
6607 #endif
6608 if (current_function_calls_alloca)
6609 {
6610 rtx tem = 0;
6611
6612 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6613 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6614 }
6615
6616 /* If scalar return value was computed in a pseudo-reg,
6617 copy that to the hard return register. */
6618 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6619 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6620 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6621 >= FIRST_PSEUDO_REGISTER))
6622 {
6623 rtx real_decl_result;
6624
6625 #ifdef FUNCTION_OUTGOING_VALUE
6626 real_decl_result
6627 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6628 current_function_decl);
6629 #else
6630 real_decl_result
6631 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6632 current_function_decl);
6633 #endif
6634 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6635 /* If this is a BLKmode structure being returned in registers, then use
6636 the mode computed in expand_return. */
6637 if (GET_MODE (real_decl_result) == BLKmode)
6638 PUT_MODE (real_decl_result,
6639 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6640 emit_move_insn (real_decl_result,
6641 DECL_RTL (DECL_RESULT (current_function_decl)));
6642
6643 /* The delay slot scheduler assumes that current_function_return_rtx
6644 holds the hard register containing the return value, not a temporary
6645 pseudo. */
6646 current_function_return_rtx = real_decl_result;
6647 }
6648
6649 /* If returning a structure, arrange to return the address of the value
6650 in a place where debuggers expect to find it.
6651
6652 If returning a structure PCC style,
6653 the caller also depends on this value.
6654 And current_function_returns_pcc_struct is not necessarily set. */
6655 if (current_function_returns_struct
6656 || current_function_returns_pcc_struct)
6657 {
6658 rtx value_address =
6659 XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6660 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6661 #ifdef FUNCTION_OUTGOING_VALUE
6662 rtx outgoing
6663 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6664 current_function_decl);
6665 #else
6666 rtx outgoing
6667 = FUNCTION_VALUE (build_pointer_type (type),
6668 current_function_decl);
6669 #endif
6670
6671 /* Mark this as a function return value so integrate will delete the
6672 assignment and USE below when inlining this function. */
6673 REG_FUNCTION_VALUE_P (outgoing) = 1;
6674
6675 emit_move_insn (outgoing, value_address);
6676 }
6677
6678 /* ??? This should no longer be necessary since stupid is no longer with
6679 us, but there are some parts of the compiler (eg reload_combine, and
6680 sh mach_dep_reorg) that still try and compute their own lifetime info
6681 instead of using the general framework. */
6682 use_return_register ();
6683
6684 /* If this is an implementation of __throw, do what's necessary to
6685 communicate between __builtin_eh_return and the epilogue. */
6686 expand_eh_return ();
6687
6688 /* Output a return insn if we are using one.
6689 Otherwise, let the rtl chain end here, to drop through
6690 into the epilogue. */
6691
6692 #ifdef HAVE_return
6693 if (HAVE_return)
6694 {
6695 emit_jump_insn (gen_return ());
6696 emit_barrier ();
6697 }
6698 #endif
6699
6700 /* Fix up any gotos that jumped out to the outermost
6701 binding level of the function.
6702 Must follow emitting RETURN_LABEL. */
6703
6704 /* If you have any cleanups to do at this point,
6705 and they need to create temporary variables,
6706 then you will lose. */
6707 expand_fixups (get_insns ());
6708 }
6709 \f
6710 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6711 sequence or a single insn). */
6712
6713 static void
6714 record_insns (insns, vecp)
6715 rtx insns;
6716 varray_type *vecp;
6717 {
6718 if (GET_CODE (insns) == SEQUENCE)
6719 {
6720 int len = XVECLEN (insns, 0);
6721 int i = VARRAY_SIZE (*vecp);
6722
6723 VARRAY_GROW (*vecp, i + len);
6724 while (--len >= 0)
6725 {
6726 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6727 ++i;
6728 }
6729 }
6730 else
6731 {
6732 int i = VARRAY_SIZE (*vecp);
6733 VARRAY_GROW (*vecp, i + 1);
6734 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6735 }
6736 }
6737
6738 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6739
6740 static int
6741 contains (insn, vec)
6742 rtx insn;
6743 varray_type vec;
6744 {
6745 register int i, j;
6746
6747 if (GET_CODE (insn) == INSN
6748 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6749 {
6750 int count = 0;
6751 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6752 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6753 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
6754 count++;
6755 return count;
6756 }
6757 else
6758 {
6759 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6760 if (INSN_UID (insn) == VARRAY_INT (vec, j))
6761 return 1;
6762 }
6763 return 0;
6764 }
6765
6766 int
6767 prologue_epilogue_contains (insn)
6768 rtx insn;
6769 {
6770 if (contains (insn, prologue))
6771 return 1;
6772 if (contains (insn, epilogue))
6773 return 1;
6774 return 0;
6775 }
6776
6777 int
6778 sibcall_epilogue_contains (insn)
6779 rtx insn;
6780 {
6781 if (sibcall_epilogue)
6782 return contains (insn, sibcall_epilogue);
6783 return 0;
6784 }
6785
6786 #ifdef HAVE_return
6787 /* Insert gen_return at the end of block BB. This also means updating
6788 block_for_insn appropriately. */
6789
6790 static void
6791 emit_return_into_block (bb, line_note)
6792 basic_block bb;
6793 rtx line_note;
6794 {
6795 rtx p, end;
6796
6797 p = NEXT_INSN (bb->end);
6798 end = emit_jump_insn_after (gen_return (), bb->end);
6799 if (line_note)
6800 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
6801 NOTE_LINE_NUMBER (line_note), bb->end);
6802
6803 while (1)
6804 {
6805 set_block_for_insn (p, bb);
6806 if (p == bb->end)
6807 break;
6808 p = PREV_INSN (p);
6809 }
6810 bb->end = end;
6811 }
6812 #endif /* HAVE_return */
6813
6814 #ifdef HAVE_epilogue
6815
6816 /* Modify SEQ, a SEQUENCE that is part of the epilogue, to no modifications
6817 to the stack pointer. */
6818
6819 static void
6820 keep_stack_depressed (seq)
6821 rtx seq;
6822 {
6823 int i;
6824 rtx sp_from_reg = 0;
6825 int sp_modified_unknown = 0;
6826
6827 /* If the epilogue is just a single instruction, it's OK as is. */
6828
6829 if (GET_CODE (seq) != SEQUENCE) return;
6830
6831 /* Scan all insns in SEQ looking for ones that modified the stack
6832 pointer. Record if it modified the stack pointer by copying it
6833 from the frame pointer or if it modified it in some other way.
6834 Then modify any subsequent stack pointer references to take that
6835 into account. We start by only allowing SP to be copied from a
6836 register (presumably FP) and then be subsequently referenced. */
6837
6838 for (i = 0; i < XVECLEN (seq, 0); i++)
6839 {
6840 rtx insn = XVECEXP (seq, 0, i);
6841
6842 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6843 continue;
6844
6845 if (reg_set_p (stack_pointer_rtx, insn))
6846 {
6847 rtx set = single_set (insn);
6848
6849 /* If SP is set as a side-effect, we can't support this. */
6850 if (set == 0)
6851 abort ();
6852
6853 if (GET_CODE (SET_SRC (set)) == REG)
6854 sp_from_reg = SET_SRC (set);
6855 else
6856 sp_modified_unknown = 1;
6857
6858 /* Don't allow the SP modification to happen. */
6859 PUT_CODE (insn, NOTE);
6860 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6861 NOTE_SOURCE_FILE (insn) = 0;
6862 }
6863 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
6864 {
6865 if (sp_modified_unknown)
6866 abort ();
6867
6868 else if (sp_from_reg != 0)
6869 PATTERN (insn)
6870 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
6871 }
6872 }
6873 }
6874 #endif
6875
6876 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6877 this into place with notes indicating where the prologue ends and where
6878 the epilogue begins. Update the basic block information when possible. */
6879
6880 void
6881 thread_prologue_and_epilogue_insns (f)
6882 rtx f ATTRIBUTE_UNUSED;
6883 {
6884 int inserted = 0;
6885 edge e;
6886 rtx seq;
6887 #ifdef HAVE_prologue
6888 rtx prologue_end = NULL_RTX;
6889 #endif
6890 #if defined (HAVE_epilogue) || defined(HAVE_return)
6891 rtx epilogue_end = NULL_RTX;
6892 #endif
6893
6894 #ifdef HAVE_prologue
6895 if (HAVE_prologue)
6896 {
6897 start_sequence ();
6898 seq = gen_prologue ();
6899 emit_insn (seq);
6900
6901 /* Retain a map of the prologue insns. */
6902 if (GET_CODE (seq) != SEQUENCE)
6903 seq = get_insns ();
6904 record_insns (seq, &prologue);
6905 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6906
6907 seq = gen_sequence ();
6908 end_sequence ();
6909
6910 /* If optimization is off, and perhaps in an empty function,
6911 the entry block will have no successors. */
6912 if (ENTRY_BLOCK_PTR->succ)
6913 {
6914 /* Can't deal with multiple successors of the entry block. */
6915 if (ENTRY_BLOCK_PTR->succ->succ_next)
6916 abort ();
6917
6918 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6919 inserted = 1;
6920 }
6921 else
6922 emit_insn_after (seq, f);
6923 }
6924 #endif
6925
6926 /* If the exit block has no non-fake predecessors, we don't need
6927 an epilogue. */
6928 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6929 if ((e->flags & EDGE_FAKE) == 0)
6930 break;
6931 if (e == NULL)
6932 goto epilogue_done;
6933
6934 #ifdef HAVE_return
6935 if (optimize && HAVE_return)
6936 {
6937 /* If we're allowed to generate a simple return instruction,
6938 then by definition we don't need a full epilogue. Examine
6939 the block that falls through to EXIT. If it does not
6940 contain any code, examine its predecessors and try to
6941 emit (conditional) return instructions. */
6942
6943 basic_block last;
6944 edge e_next;
6945 rtx label;
6946
6947 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6948 if (e->flags & EDGE_FALLTHRU)
6949 break;
6950 if (e == NULL)
6951 goto epilogue_done;
6952 last = e->src;
6953
6954 /* Verify that there are no active instructions in the last block. */
6955 label = last->end;
6956 while (label && GET_CODE (label) != CODE_LABEL)
6957 {
6958 if (active_insn_p (label))
6959 break;
6960 label = PREV_INSN (label);
6961 }
6962
6963 if (last->head == label && GET_CODE (label) == CODE_LABEL)
6964 {
6965 rtx epilogue_line_note = NULL_RTX;
6966
6967 /* Locate the line number associated with the closing brace,
6968 if we can find one. */
6969 for (seq = get_last_insn ();
6970 seq && ! active_insn_p (seq);
6971 seq = PREV_INSN (seq))
6972 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
6973 {
6974 epilogue_line_note = seq;
6975 break;
6976 }
6977
6978 for (e = last->pred; e; e = e_next)
6979 {
6980 basic_block bb = e->src;
6981 rtx jump;
6982
6983 e_next = e->pred_next;
6984 if (bb == ENTRY_BLOCK_PTR)
6985 continue;
6986
6987 jump = bb->end;
6988 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
6989 continue;
6990
6991 /* If we have an unconditional jump, we can replace that
6992 with a simple return instruction. */
6993 if (simplejump_p (jump))
6994 {
6995 emit_return_into_block (bb, epilogue_line_note);
6996 flow_delete_insn (jump);
6997 }
6998
6999 /* If we have a conditional jump, we can try to replace
7000 that with a conditional return instruction. */
7001 else if (condjump_p (jump))
7002 {
7003 rtx ret, *loc;
7004
7005 ret = SET_SRC (PATTERN (jump));
7006 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7007 loc = &XEXP (ret, 1);
7008 else
7009 loc = &XEXP (ret, 2);
7010 ret = gen_rtx_RETURN (VOIDmode);
7011
7012 if (! validate_change (jump, loc, ret, 0))
7013 continue;
7014 if (JUMP_LABEL (jump))
7015 LABEL_NUSES (JUMP_LABEL (jump))--;
7016
7017 /* If this block has only one successor, it both jumps
7018 and falls through to the fallthru block, so we can't
7019 delete the edge. */
7020 if (bb->succ->succ_next == NULL)
7021 continue;
7022 }
7023 else
7024 continue;
7025
7026 /* Fix up the CFG for the successful change we just made. */
7027 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7028 }
7029
7030 /* Emit a return insn for the exit fallthru block. Whether
7031 this is still reachable will be determined later. */
7032
7033 emit_barrier_after (last->end);
7034 emit_return_into_block (last, epilogue_line_note);
7035 epilogue_end = last->end;
7036 goto epilogue_done;
7037 }
7038 }
7039 #endif
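  /* Editorial sketch (not part of the original source): the conditional
     branch case above rewrites a jump of roughly this shape

         (set (pc) (if_then_else (cond) (label_ref L) (pc)))

     into a conditional return

         (set (pc) (if_then_else (cond) (return) (pc)))

     by swapping the LABEL_REF arm for (return) via validate_change; the
     corresponding CFG edge is then redirected to EXIT_BLOCK_PTR.  */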
7040 #ifdef HAVE_epilogue
7041 if (HAVE_epilogue)
7042 {
7043 /* Find the edge that falls through to EXIT. Other edges may exist
7044 due to RETURN instructions, but those don't need epilogues.
7045 There really shouldn't be a mixture -- either all should have
7046 been converted or none, however... */
7047
7048 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7049 if (e->flags & EDGE_FALLTHRU)
7050 break;
7051 if (e == NULL)
7052 goto epilogue_done;
7053
7054 start_sequence ();
7055 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7056
7057 seq = gen_epilogue ();
7058
7059 /* If this function returns with the stack depressed, massage
7060 the epilogue to actually do that. */
7061 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7062 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7063 keep_stack_depressed (seq);
7064
7065 emit_jump_insn (seq);
7066
7067 /* Retain a map of the epilogue insns. */
7068 if (GET_CODE (seq) != SEQUENCE)
7069 seq = get_insns ();
7070 record_insns (seq, &epilogue);
7071
7072 seq = gen_sequence ();
7073 end_sequence ();
7074
7075 insert_insn_on_edge (seq, e);
7076 inserted = 1;
7077 }
7078 #endif
7079 epilogue_done:
7080
7081 if (inserted)
7082 commit_edge_insertions ();
7083
7084 #ifdef HAVE_sibcall_epilogue
7085 /* Emit sibling epilogues before any sibling call sites. */
7086 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7087 {
7088 basic_block bb = e->src;
7089 rtx insn = bb->end;
7090 rtx i;
7091 rtx newinsn;
7092
7093 if (GET_CODE (insn) != CALL_INSN
7094 || ! SIBLING_CALL_P (insn))
7095 continue;
7096
7097 start_sequence ();
7098 seq = gen_sibcall_epilogue ();
7099 end_sequence ();
7100
7101 i = PREV_INSN (insn);
7102 newinsn = emit_insn_before (seq, insn);
7103
7104 /* Update the UID to basic block map. */
7105 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7106 set_block_for_insn (i, bb);
7107
7108 /* Retain a map of the epilogue insns. Used in life analysis to
7109 avoid getting rid of sibcall epilogue insns. */
7110 record_insns (GET_CODE (seq) == SEQUENCE
7111 ? seq : newinsn, &sibcall_epilogue);
7112 }
7113 #endif
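  /* Editorial sketch (not part of the original source): for a target with
     HAVE_sibcall_epilogue, each sibling call site ends up looking like

         (insn ...)                 ;; sibcall epilogue: restore registers, etc.
         ...
         (call_insn ...)            ;; CALL_INSN with SIBLING_CALL_P set

     i.e. the epilogue sequence is emitted immediately before the tail call
     so the frame is torn down before control leaves the function.  */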
7114
7115 #ifdef HAVE_prologue
7116 if (prologue_end)
7117 {
7118 rtx insn, prev;
7119
7120 /* GDB handles `break f' by setting a breakpoint on the first
7121 line note after the prologue. This means (1) that if there are
7122 line number notes before the point where we inserted the
7123 prologue, we should move them after it, and (2) that we should
7124 generate a note before the end of the first basic block, if
7125 there isn't one already there. */
7126
7127 for (insn = prologue_end; insn; insn = prev)
7128 {
7129 prev = PREV_INSN (insn);
7130 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7131 {
7132 /* Note that we cannot reorder the first insn in the
7133 chain, since rest_of_compilation relies on that
7134 remaining constant. */
7135 if (prev == NULL)
7136 break;
7137 reorder_insns (insn, insn, prologue_end);
7138 }
7139 }
7140
7141 /* Find the last line number note in the first block. */
7142 for (insn = BASIC_BLOCK (0)->end;
7143 insn != prologue_end;
7144 insn = PREV_INSN (insn))
7145 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7146 break;
7147
7148 /* If we didn't find one, make a copy of the first line number
7149 we run across. */
7150 if (! insn)
7151 {
7152 for (insn = next_active_insn (prologue_end);
7153 insn;
7154 insn = PREV_INSN (insn))
7155 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7156 {
7157 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7158 NOTE_LINE_NUMBER (insn),
7159 prologue_end);
7160 break;
7161 }
7162 }
7163 }
7164 #endif
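  /* Editorial sketch (not part of the original source): the fix-up above
     aims for a stream shaped roughly like

         ...prologue insns...
         (note NOTE_INSN_PROLOGUE_END)
         (note <line number of the first statement>)   ;; `break f' lands here
         ...body insns...

     so that any line notes emitted ahead of the prologue are moved past it
     and GDB's breakpoint falls on user code rather than inside the prologue.  */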
7165 #ifdef HAVE_epilogue
7166 if (epilogue_end)
7167 {
7168 rtx insn, next;
7169
7170 /* Similarly, move any line notes that appear after the epilogue.
7171 There is no need, however, to be quite so anal about the existence
7172 of such a note. */
7173 for (insn = epilogue_end; insn; insn = next)
7174 {
7175 next = NEXT_INSN (insn);
7176 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7177 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7178 }
7179 }
7180 #endif
7181 }
7182
7183 /* Reposition the prologue-end and epilogue-begin notes after instruction
7184 scheduling and delayed branch scheduling. */
7185
7186 void
7187 reposition_prologue_and_epilogue_notes (f)
7188 rtx f ATTRIBUTE_UNUSED;
7189 {
7190 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7191 int len;
7192
7193 if ((len = VARRAY_SIZE (prologue)) > 0)
7194 {
7195 register rtx insn, note = 0;
7196
7197 /* Scan from the beginning until we reach the last prologue insn.
7198 We apparently can't depend on basic_block_{head,end} after
7199 reorg has run. */
7200 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7201 {
7202 if (GET_CODE (insn) == NOTE)
7203 {
7204 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7205 note = insn;
7206 }
7207 else if ((len -= contains (insn, prologue)) == 0)
7208 {
7209 rtx next;
7210 /* Find the prologue-end note if we haven't already, and
7211 move it to just after the last prologue insn. */
7212 if (note == 0)
7213 {
7214 for (note = insn; (note = NEXT_INSN (note));)
7215 if (GET_CODE (note) == NOTE
7216 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7217 break;
7218 }
7219
7220 next = NEXT_INSN (note);
7221
7222 /* Whether or not we can depend on BLOCK_HEAD,
7223 attempt to keep it up-to-date. */
7224 if (BLOCK_HEAD (0) == note)
7225 BLOCK_HEAD (0) = next;
7226
7227 remove_insn (note);
7228 add_insn_after (note, insn);
7229 }
7230 }
7231 }
7232
7233 if ((len = VARRAY_SIZE (epilogue)) > 0)
7234 {
7235 register rtx insn, note = 0;
7236
7237 /* Scan from the end until we reach the first epilogue insn.
7238 We apparently can't depend on basic_block_{head,end} after
7239 reorg has run. */
7240 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7241 {
7242 if (GET_CODE (insn) == NOTE)
7243 {
7244 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7245 note = insn;
7246 }
7247 else if ((len -= contains (insn, epilogue)) == 0)
7248 {
7249 /* Find the epilogue-begin note if we haven't already, and
7250 move it to just before the first epilogue insn. */
7251 if (note == 0)
7252 {
7253 for (note = insn; (note = PREV_INSN (note));)
7254 if (GET_CODE (note) == NOTE
7255 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7256 break;
7257 }
7258
7259 /* Whether or not we can depend on BLOCK_HEAD,
7260 attempt to keep it up-to-date. */
7261 if (n_basic_blocks
7262 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7263 BLOCK_HEAD (n_basic_blocks-1) = note;
7264
7265 remove_insn (note);
7266 add_insn_before (note, insn);
7267 }
7268 }
7269 }
7270 #endif /* HAVE_prologue or HAVE_epilogue */
7271 }
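
/* Editorial sketch (not part of the original source): scheduling and reorg
   may move prologue or epilogue insns past their marker notes, e.g.

       (note NOTE_INSN_PROLOGUE_END)
       (insn ...)          ;; a prologue insn the scheduler moved downward

   The routine above walks the insn stream and re-places the prologue-end
   note just after the last prologue insn (and, symmetrically, the
   epilogue-begin note just before the first epilogue insn).  */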
7272
7273 /* Mark T for GC. */
7274
7275 static void
7276 mark_temp_slot (t)
7277 struct temp_slot *t;
7278 {
7279 while (t)
7280 {
7281 ggc_mark_rtx (t->slot);
7282 ggc_mark_rtx (t->address);
7283 ggc_mark_tree (t->rtl_expr);
7284
7285 t = t->next;
7286 }
7287 }
7288
7289 /* Mark P for GC. */
7290
7291 static void
7292 mark_function_status (p)
7293 struct function *p;
7294 {
7295 int i;
7296 rtx *r;
7297
7298 if (p == 0)
7299 return;
7300
7301 ggc_mark_rtx (p->arg_offset_rtx);
7302
7303 if (p->x_parm_reg_stack_loc)
7304 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7305 i > 0; --i, ++r)
7306 ggc_mark_rtx (*r);
7307
7308 ggc_mark_rtx (p->return_rtx);
7309 ggc_mark_rtx (p->x_cleanup_label);
7310 ggc_mark_rtx (p->x_return_label);
7311 ggc_mark_rtx (p->x_save_expr_regs);
7312 ggc_mark_rtx (p->x_stack_slot_list);
7313 ggc_mark_rtx (p->x_parm_birth_insn);
7314 ggc_mark_rtx (p->x_tail_recursion_label);
7315 ggc_mark_rtx (p->x_tail_recursion_reentry);
7316 ggc_mark_rtx (p->internal_arg_pointer);
7317 ggc_mark_rtx (p->x_arg_pointer_save_area);
7318 ggc_mark_tree (p->x_rtl_expr_chain);
7319 ggc_mark_rtx (p->x_last_parm_insn);
7320 ggc_mark_tree (p->x_context_display);
7321 ggc_mark_tree (p->x_trampoline_list);
7322 ggc_mark_rtx (p->epilogue_delay_list);
7323
7324 mark_temp_slot (p->x_temp_slots);
7325
7326 {
7327 struct var_refs_queue *q = p->fixup_var_refs_queue;
7328 while (q)
7329 {
7330 ggc_mark_rtx (q->modified);
7331 q = q->next;
7332 }
7333 }
7334
7335 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7336 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7337 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7338 ggc_mark_tree (p->x_nonlocal_labels);
7339 }
7340
7341 /* Mark the function chain ARG (which is really a struct function **)
7342 for GC. */
7343
7344 static void
7345 mark_function_chain (arg)
7346 void *arg;
7347 {
7348 struct function *f = *(struct function **) arg;
7349
7350 for (; f; f = f->next_global)
7351 {
7352 ggc_mark_tree (f->decl);
7353
7354 mark_function_status (f);
7355 mark_eh_status (f->eh);
7356 mark_stmt_status (f->stmt);
7357 mark_expr_status (f->expr);
7358 mark_emit_status (f->emit);
7359 mark_varasm_status (f->varasm);
7360
7361 if (mark_machine_status)
7362 (*mark_machine_status) (f);
7363 if (mark_lang_status)
7364 (*mark_lang_status) (f);
7365
7366 if (f->original_arg_vector)
7367 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7368 if (f->original_decl_initial)
7369 ggc_mark_tree (f->original_decl_initial);
7370 }
7371 }
7372
7373 /* Called once, at initialization, to initialize function.c. */
7374
7375 void
7376 init_function_once ()
7377 {
7378 ggc_add_root (&all_functions, 1, sizeof all_functions,
7379 mark_function_chain);
7380
7381 VARRAY_INT_INIT (prologue, 0, "prologue");
7382 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7383 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7384 }
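
/* Editorial note (not part of the original source): ggc_add_root registers
   ALL_FUNCTIONS as a single garbage-collection root; during each collection
   the callback mark_function_chain is invoked on its address, which in turn
   marks every saved function's rtx and tree data so that deferred (nested or
   queued) functions survive collections between passes.  */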