]> gcc.gnu.org Git - gcc.git/blob - gcc/function.c
final.c: Move the declaration profile_label_no to ...
[gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62
63 #ifndef ACCUMULATE_OUTGOING_ARGS
64 #define ACCUMULATE_OUTGOING_ARGS 0
65 #endif
66
67 #ifndef TRAMPOLINE_ALIGNMENT
68 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
69 #endif
70
71 #ifndef LOCAL_ALIGNMENT
72 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
73 #endif
74
75 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
76 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
77 #endif
78
79 /* Some systems use __main in a way incompatible with its use in gcc, in these
80 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
81 give the same symbol without quotes for an alternative entry point. You
82 must define both, or neither. */
83 #ifndef NAME__MAIN
84 #define NAME__MAIN "__main"
85 #define SYMBOL__MAIN __main
86 #endif
87
88 /* Round a value to the lowest integer less than it that is a multiple of
89 the required alignment. Avoid using division in case the value is
90 negative. Assume the alignment is a power of two. */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
92
93 /* Similar, but round to the next highest integer that meets the
94 alignment. */
95 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
96
97 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
98 during rtl generation. If they are different register numbers, this is
99 always true. It may also be true if
100 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
101 generation. See fix_lexical_addr for details. */
102
103 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
104 #define NEED_SEPARATE_AP
105 #endif
106
107 /* Nonzero if function being compiled doesn't contain any calls
108 (ignoring the prologue and epilogue). This is set prior to
109 local register allocation and is valid for the remaining
110 compiler passes. */
111 int current_function_is_leaf;
112
113 /* Nonzero if function being compiled doesn't contain any instructions
114 that can throw an exception. This is set prior to final. */
115
116 int current_function_nothrow;
117
118 /* Nonzero if function being compiled doesn't modify the stack pointer
119 (ignoring the prologue and epilogue). This is only valid after
120 life_analysis has run. */
121 int current_function_sp_is_unchanging;
122
123 /* Nonzero if the function being compiled is a leaf function which only
124 uses leaf registers. This is valid after reload (specifically after
125 sched2) and is useful only if the port defines LEAF_REGISTERS. */
126 int current_function_uses_only_leaf_regs;
127
128 /* Nonzero once virtual register instantiation has been done.
129 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
130 static int virtuals_instantiated;
131
132 /* These variables hold pointers to functions to create and destroy
133 target specific, per-function data structures. */
134 void (*init_machine_status) PARAMS ((struct function *));
135 void (*free_machine_status) PARAMS ((struct function *));
136 /* This variable holds a pointer to a function to register any
137 data items in the target specific, per-function data structure
138 that will need garbage collection. */
139 void (*mark_machine_status) PARAMS ((struct function *));
140
141 /* Likewise, but for language-specific data. */
142 void (*init_lang_status) PARAMS ((struct function *));
143 void (*save_lang_status) PARAMS ((struct function *));
144 void (*restore_lang_status) PARAMS ((struct function *));
145 void (*mark_lang_status) PARAMS ((struct function *));
146 void (*free_lang_status) PARAMS ((struct function *));
147
148 /* The FUNCTION_DECL for an inline function currently being expanded. */
149 tree inline_function_decl;
150
151 /* The currently compiled function. */
152 struct function *cfun = 0;
153
154 /* Global list of all compiled functions. */
155 struct function *all_functions = 0;
156
157 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
158 static varray_type prologue;
159 static varray_type epilogue;
160
161 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
162 in this function. */
163 static varray_type sibcall_epilogue;
164 \f
165 /* In order to evaluate some expressions, such as function calls returning
166 structures in memory, we need to temporarily allocate stack locations.
167 We record each allocated temporary in the following structure.
168
169 Associated with each temporary slot is a nesting level. When we pop up
170 one level, all temporaries associated with the previous level are freed.
171 Normally, all temporaries are freed after the execution of the statement
172 in which they were created. However, if we are inside a ({...}) grouping,
173 the result may be in a temporary and hence must be preserved. If the
174 result could be in a temporary, we preserve it if we can determine which
175 one it is in. If we cannot determine which temporary may contain the
176 result, all temporaries are preserved. A temporary is preserved by
177 pretending it was allocated at the previous nesting level.
178
179 Automatic variables are also assigned temporary slots, at the nesting
180 level where they are defined. They are marked a "kept" so that
181 free_temp_slots will not free them. */
182
183 struct temp_slot
184 {
185 /* Points to next temporary slot. */
186 struct temp_slot *next;
187 /* The rtx to used to reference the slot. */
188 rtx slot;
189 /* The rtx used to represent the address if not the address of the
190 slot above. May be an EXPR_LIST if multiple addresses exist. */
191 rtx address;
192 /* The alignment (in bits) of the slot. */
193 int align;
194 /* The size, in units, of the slot. */
195 HOST_WIDE_INT size;
196 /* The type of the object in the slot, or zero if it doesn't correspond
197 to a type. We use this to determine whether a slot can be reused.
198 It can be reused if objects of the type of the new slot will always
199 conflict with objects of the type of the old slot. */
200 tree type;
201 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
202 tree rtl_expr;
203 /* Non-zero if this temporary is currently in use. */
204 char in_use;
205 /* Non-zero if this temporary has its address taken. */
206 char addr_taken;
207 /* Nesting level at which this slot is being used. */
208 int level;
209 /* Non-zero if this should survive a call to free_temp_slots. */
210 int keep;
211 /* The offset of the slot from the frame_pointer, including extra space
212 for alignment. This info is for combine_temp_slots. */
213 HOST_WIDE_INT base_offset;
214 /* The size of the slot, including extra space for alignment. This
215 info is for combine_temp_slots. */
216 HOST_WIDE_INT full_size;
217 };
218 \f
219 /* This structure is used to record MEMs or pseudos used to replace VAR, any
220 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
221 maintain this list in case two operands of an insn were required to match;
222 in that case we must ensure we use the same replacement. */
223
224 struct fixup_replacement
225 {
226 rtx old;
227 rtx new;
228 struct fixup_replacement *next;
229 };
230
231 struct insns_for_mem_entry {
232 /* The KEY in HE will be a MEM. */
233 struct hash_entry he;
234 /* These are the INSNS which reference the MEM. */
235 rtx insns;
236 };
237
238 /* Forward declarations. */
239
240 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
241 int, struct function *));
242 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
243 HOST_WIDE_INT, int, tree));
244 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
245 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
246 enum machine_mode, enum machine_mode,
247 int, unsigned int, int,
248 struct hash_table *));
249 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
250 enum machine_mode,
251 struct hash_table *));
252 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
253 struct hash_table *));
254 static struct fixup_replacement
255 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
256 static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
257 int, int));
258 static void fixup_var_refs_insns_with_hash
259 PARAMS ((struct hash_table *, rtx,
260 enum machine_mode, int));
261 static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
262 int, int));
263 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
264 struct fixup_replacement **));
265 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
266 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
267 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
268 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
269 static void instantiate_decls PARAMS ((tree, int));
270 static void instantiate_decls_1 PARAMS ((tree, int));
271 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
272 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
273 static void delete_handlers PARAMS ((void));
274 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
275 struct args_size *));
276 #ifndef ARGS_GROW_DOWNWARD
277 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
278 tree));
279 #endif
280 static rtx round_trampoline_addr PARAMS ((rtx));
281 static rtx adjust_trampoline_addr PARAMS ((rtx));
282 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
283 static void reorder_blocks_0 PARAMS ((rtx));
284 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
285 static tree blocks_nreverse PARAMS ((tree));
286 static int all_blocks PARAMS ((tree, tree *));
287 static tree *get_block_vector PARAMS ((tree, int *));
288 /* We always define `record_insns' even if its not used so that we
289 can always export `prologue_epilogue_contains'. */
290 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
291 static int contains PARAMS ((rtx, varray_type));
292 #ifdef HAVE_return
293 static void emit_return_into_block PARAMS ((basic_block, rtx));
294 #endif
295 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
296 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
297 struct hash_table *));
298 static void purge_single_hard_subreg_set PARAMS ((rtx));
299 #ifdef HAVE_epilogue
300 static void keep_stack_depressed PARAMS ((rtx));
301 #endif
302 static int is_addressof PARAMS ((rtx *, void *));
303 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
304 struct hash_table *,
305 hash_table_key));
306 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
307 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
308 static int insns_for_mem_walk PARAMS ((rtx *, void *));
309 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
310 static void mark_temp_slot PARAMS ((struct temp_slot *));
311 static void mark_function_status PARAMS ((struct function *));
312 static void mark_function_chain PARAMS ((void *));
313 static void prepare_function_start PARAMS ((void));
314 static void do_clobber_return_reg PARAMS ((rtx, void *));
315 static void do_use_return_reg PARAMS ((rtx, void *));
316 \f
317 /* Pointer to chain of `struct function' for containing functions. */
318 struct function *outer_function_chain;
319
320 /* Given a function decl for a containing function,
321 return the `struct function' for it. */
322
323 struct function *
324 find_function_data (decl)
325 tree decl;
326 {
327 struct function *p;
328
329 for (p = outer_function_chain; p; p = p->next)
330 if (p->decl == decl)
331 return p;
332
333 abort ();
334 }
335
336 /* Save the current context for compilation of a nested function.
337 This is called from language-specific code. The caller should use
338 the save_lang_status callback to save any language-specific state,
339 since this function knows only about language-independent
340 variables. */
341
342 void
343 push_function_context_to (context)
344 tree context;
345 {
346 struct function *p, *context_data;
347
348 if (context)
349 {
350 context_data = (context == current_function_decl
351 ? cfun
352 : find_function_data (context));
353 context_data->contains_functions = 1;
354 }
355
356 if (cfun == 0)
357 init_dummy_function_start ();
358 p = cfun;
359
360 p->next = outer_function_chain;
361 outer_function_chain = p;
362 p->fixup_var_refs_queue = 0;
363
364 if (save_lang_status)
365 (*save_lang_status) (p);
366
367 cfun = 0;
368 }
369
370 void
371 push_function_context ()
372 {
373 push_function_context_to (current_function_decl);
374 }
375
376 /* Restore the last saved context, at the end of a nested function.
377 This function is called from language-specific code. */
378
379 void
380 pop_function_context_from (context)
381 tree context ATTRIBUTE_UNUSED;
382 {
383 struct function *p = outer_function_chain;
384 struct var_refs_queue *queue;
385 struct var_refs_queue *next;
386
387 cfun = p;
388 outer_function_chain = p->next;
389
390 current_function_decl = p->decl;
391 reg_renumber = 0;
392
393 restore_emit_status (p);
394
395 if (restore_lang_status)
396 (*restore_lang_status) (p);
397
398 /* Finish doing put_var_into_stack for any of our variables
399 which became addressable during the nested function. */
400 for (queue = p->fixup_var_refs_queue; queue; queue = next)
401 {
402 next = queue->next;
403 fixup_var_refs (queue->modified, queue->promoted_mode,
404 queue->unsignedp, 0);
405 free (queue);
406 }
407 p->fixup_var_refs_queue = 0;
408
409 /* Reset variables that have known state during rtx generation. */
410 rtx_equal_function_value_matters = 1;
411 virtuals_instantiated = 0;
412 generating_concat_p = 1;
413 }
414
415 void
416 pop_function_context ()
417 {
418 pop_function_context_from (current_function_decl);
419 }
420
421 /* Clear out all parts of the state in F that can safely be discarded
422 after the function has been parsed, but not compiled, to let
423 garbage collection reclaim the memory. */
424
425 void
426 free_after_parsing (f)
427 struct function *f;
428 {
429 /* f->expr->forced_labels is used by code generation. */
430 /* f->emit->regno_reg_rtx is used by code generation. */
431 /* f->varasm is used by code generation. */
432 /* f->eh->eh_return_stub_label is used by code generation. */
433
434 if (free_lang_status)
435 (*free_lang_status) (f);
436 free_stmt_status (f);
437 }
438
439 /* Clear out all parts of the state in F that can safely be discarded
440 after the function has been compiled, to let garbage collection
441 reclaim the memory. */
442
443 void
444 free_after_compilation (f)
445 struct function *f;
446 {
447 struct temp_slot *ts;
448 struct temp_slot *next;
449
450 free_eh_status (f);
451 free_expr_status (f);
452 free_emit_status (f);
453 free_varasm_status (f);
454
455 if (free_machine_status)
456 (*free_machine_status) (f);
457
458 if (f->x_parm_reg_stack_loc)
459 free (f->x_parm_reg_stack_loc);
460
461 for (ts = f->x_temp_slots; ts; ts = next)
462 {
463 next = ts->next;
464 free (ts);
465 }
466 f->x_temp_slots = NULL;
467
468 f->arg_offset_rtx = NULL;
469 f->return_rtx = NULL;
470 f->internal_arg_pointer = NULL;
471 f->x_nonlocal_labels = NULL;
472 f->x_nonlocal_goto_handler_slots = NULL;
473 f->x_nonlocal_goto_handler_labels = NULL;
474 f->x_nonlocal_goto_stack_level = NULL;
475 f->x_cleanup_label = NULL;
476 f->x_return_label = NULL;
477 f->x_save_expr_regs = NULL;
478 f->x_stack_slot_list = NULL;
479 f->x_rtl_expr_chain = NULL;
480 f->x_tail_recursion_label = NULL;
481 f->x_tail_recursion_reentry = NULL;
482 f->x_arg_pointer_save_area = NULL;
483 f->x_clobber_return_insn = NULL;
484 f->x_context_display = NULL;
485 f->x_trampoline_list = NULL;
486 f->x_parm_birth_insn = NULL;
487 f->x_last_parm_insn = NULL;
488 f->x_parm_reg_stack_loc = NULL;
489 f->fixup_var_refs_queue = NULL;
490 f->original_arg_vector = NULL;
491 f->original_decl_initial = NULL;
492 f->inl_last_parm_insn = NULL;
493 f->epilogue_delay_list = NULL;
494 }
495 \f
496 /* Allocate fixed slots in the stack frame of the current function. */
497
498 /* Return size needed for stack frame based on slots so far allocated in
499 function F.
500 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
501 the caller may have to do that. */
502
503 HOST_WIDE_INT
504 get_func_frame_size (f)
505 struct function *f;
506 {
507 #ifdef FRAME_GROWS_DOWNWARD
508 return -f->x_frame_offset;
509 #else
510 return f->x_frame_offset;
511 #endif
512 }
513
514 /* Return size needed for stack frame based on slots so far allocated.
515 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
516 the caller may have to do that. */
517 HOST_WIDE_INT
518 get_frame_size ()
519 {
520 return get_func_frame_size (cfun);
521 }
522
523 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
524 with machine mode MODE.
525
526 ALIGN controls the amount of alignment for the address of the slot:
527 0 means according to MODE,
528 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
529 positive specifies alignment boundary in bits.
530
531 We do not round to stack_boundary here.
532
533 FUNCTION specifies the function to allocate in. */
534
535 static rtx
536 assign_stack_local_1 (mode, size, align, function)
537 enum machine_mode mode;
538 HOST_WIDE_INT size;
539 int align;
540 struct function *function;
541 {
542 register rtx x, addr;
543 int bigend_correction = 0;
544 int alignment;
545
546 if (align == 0)
547 {
548 tree type;
549
550 if (mode == BLKmode)
551 alignment = BIGGEST_ALIGNMENT;
552 else
553 alignment = GET_MODE_ALIGNMENT (mode);
554
555 /* Allow the target to (possibly) increase the alignment of this
556 stack slot. */
557 type = type_for_mode (mode, 0);
558 if (type)
559 alignment = LOCAL_ALIGNMENT (type, alignment);
560
561 alignment /= BITS_PER_UNIT;
562 }
563 else if (align == -1)
564 {
565 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
566 size = CEIL_ROUND (size, alignment);
567 }
568 else
569 alignment = align / BITS_PER_UNIT;
570
571 #ifdef FRAME_GROWS_DOWNWARD
572 function->x_frame_offset -= size;
573 #endif
574
575 /* Ignore alignment we can't do with expected alignment of the boundary. */
576 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
577 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
578
579 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
580 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
581
582 /* Round frame offset to that alignment.
583 We must be careful here, since FRAME_OFFSET might be negative and
584 division with a negative dividend isn't as well defined as we might
585 like. So we instead assume that ALIGNMENT is a power of two and
586 use logical operations which are unambiguous. */
587 #ifdef FRAME_GROWS_DOWNWARD
588 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
589 #else
590 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
591 #endif
592
593 /* On a big-endian machine, if we are allocating more space than we will use,
594 use the least significant bytes of those that are allocated. */
595 if (BYTES_BIG_ENDIAN && mode != BLKmode)
596 bigend_correction = size - GET_MODE_SIZE (mode);
597
598 /* If we have already instantiated virtual registers, return the actual
599 address relative to the frame pointer. */
600 if (function == cfun && virtuals_instantiated)
601 addr = plus_constant (frame_pointer_rtx,
602 (frame_offset + bigend_correction
603 + STARTING_FRAME_OFFSET));
604 else
605 addr = plus_constant (virtual_stack_vars_rtx,
606 function->x_frame_offset + bigend_correction);
607
608 #ifndef FRAME_GROWS_DOWNWARD
609 function->x_frame_offset += size;
610 #endif
611
612 x = gen_rtx_MEM (mode, addr);
613
614 function->x_stack_slot_list
615 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
616
617 return x;
618 }
619
620 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
621 current function. */
622
623 rtx
624 assign_stack_local (mode, size, align)
625 enum machine_mode mode;
626 HOST_WIDE_INT size;
627 int align;
628 {
629 return assign_stack_local_1 (mode, size, align, cfun);
630 }
631 \f
632 /* Allocate a temporary stack slot and record it for possible later
633 reuse.
634
635 MODE is the machine mode to be given to the returned rtx.
636
637 SIZE is the size in units of the space required. We do no rounding here
638 since assign_stack_local will do any required rounding.
639
640 KEEP is 1 if this slot is to be retained after a call to
641 free_temp_slots. Automatic variables for a block are allocated
642 with this flag. KEEP is 2 if we allocate a longer term temporary,
643 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
644 if we are to allocate something at an inner level to be treated as
645 a variable in the block (e.g., a SAVE_EXPR).
646
647 TYPE is the type that will be used for the stack slot. */
648
649 static rtx
650 assign_stack_temp_for_type (mode, size, keep, type)
651 enum machine_mode mode;
652 HOST_WIDE_INT size;
653 int keep;
654 tree type;
655 {
656 int align;
657 struct temp_slot *p, *best_p = 0;
658
659 /* If SIZE is -1 it means that somebody tried to allocate a temporary
660 of a variable size. */
661 if (size == -1)
662 abort ();
663
664 if (mode == BLKmode)
665 align = BIGGEST_ALIGNMENT;
666 else
667 align = GET_MODE_ALIGNMENT (mode);
668
669 if (! type)
670 type = type_for_mode (mode, 0);
671
672 if (type)
673 align = LOCAL_ALIGNMENT (type, align);
674
675 /* Try to find an available, already-allocated temporary of the proper
676 mode which meets the size and alignment requirements. Choose the
677 smallest one with the closest alignment. */
678 for (p = temp_slots; p; p = p->next)
679 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
680 && ! p->in_use
681 && objects_must_conflict_p (p->type, type)
682 && (best_p == 0 || best_p->size > p->size
683 || (best_p->size == p->size && best_p->align > p->align)))
684 {
685 if (p->align == align && p->size == size)
686 {
687 best_p = 0;
688 break;
689 }
690 best_p = p;
691 }
692
693 /* Make our best, if any, the one to use. */
694 if (best_p)
695 {
696 /* If there are enough aligned bytes left over, make them into a new
697 temp_slot so that the extra bytes don't get wasted. Do this only
698 for BLKmode slots, so that we can be sure of the alignment. */
699 if (GET_MODE (best_p->slot) == BLKmode)
700 {
701 int alignment = best_p->align / BITS_PER_UNIT;
702 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
703
704 if (best_p->size - rounded_size >= alignment)
705 {
706 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
707 p->in_use = p->addr_taken = 0;
708 p->size = best_p->size - rounded_size;
709 p->base_offset = best_p->base_offset + rounded_size;
710 p->full_size = best_p->full_size - rounded_size;
711 p->slot = gen_rtx_MEM (BLKmode,
712 plus_constant (XEXP (best_p->slot, 0),
713 rounded_size));
714 p->align = best_p->align;
715 p->address = 0;
716 p->rtl_expr = 0;
717 p->type = best_p->type;
718 p->next = temp_slots;
719 temp_slots = p;
720
721 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
722 stack_slot_list);
723
724 best_p->size = rounded_size;
725 best_p->full_size = rounded_size;
726 }
727 }
728
729 p = best_p;
730 }
731
732 /* If we still didn't find one, make a new temporary. */
733 if (p == 0)
734 {
735 HOST_WIDE_INT frame_offset_old = frame_offset;
736
737 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
738
739 /* We are passing an explicit alignment request to assign_stack_local.
740 One side effect of that is assign_stack_local will not round SIZE
741 to ensure the frame offset remains suitably aligned.
742
743 So for requests which depended on the rounding of SIZE, we go ahead
744 and round it now. We also make sure ALIGNMENT is at least
745 BIGGEST_ALIGNMENT. */
746 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
747 abort();
748 p->slot = assign_stack_local (mode,
749 (mode == BLKmode
750 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
751 : size),
752 align);
753
754 p->align = align;
755
756 /* The following slot size computation is necessary because we don't
757 know the actual size of the temporary slot until assign_stack_local
758 has performed all the frame alignment and size rounding for the
759 requested temporary. Note that extra space added for alignment
760 can be either above or below this stack slot depending on which
761 way the frame grows. We include the extra space if and only if it
762 is above this slot. */
763 #ifdef FRAME_GROWS_DOWNWARD
764 p->size = frame_offset_old - frame_offset;
765 #else
766 p->size = size;
767 #endif
768
769 /* Now define the fields used by combine_temp_slots. */
770 #ifdef FRAME_GROWS_DOWNWARD
771 p->base_offset = frame_offset;
772 p->full_size = frame_offset_old - frame_offset;
773 #else
774 p->base_offset = frame_offset_old;
775 p->full_size = frame_offset - frame_offset_old;
776 #endif
777 p->address = 0;
778 p->next = temp_slots;
779 temp_slots = p;
780 }
781
782 p->in_use = 1;
783 p->addr_taken = 0;
784 p->rtl_expr = seq_rtl_expr;
785 p->type = type;
786
787 if (keep == 2)
788 {
789 p->level = target_temp_slot_level;
790 p->keep = 0;
791 }
792 else if (keep == 3)
793 {
794 p->level = var_temp_slot_level;
795 p->keep = 0;
796 }
797 else
798 {
799 p->level = temp_slot_level;
800 p->keep = keep;
801 }
802
803 /* We may be reusing an old slot, so clear any MEM flags that may have been
804 set from before. */
805 RTX_UNCHANGING_P (p->slot) = 0;
806 MEM_IN_STRUCT_P (p->slot) = 0;
807 MEM_SCALAR_P (p->slot) = 0;
808 MEM_VOLATILE_P (p->slot) = 0;
809
810 /* If we know the alias set for the memory that will be used, use
811 it. If there's no TYPE, then we don't know anything about the
812 alias set for the memory. */
813 if (type)
814 MEM_ALIAS_SET (p->slot) = get_alias_set (type);
815 else
816 MEM_ALIAS_SET (p->slot) = 0;
817
818 /* If a type is specified, set the relevant flags. */
819 if (type != 0)
820 {
821 RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
822 MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
823 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
824 }
825
826 return p->slot;
827 }
828
829 /* Allocate a temporary stack slot and record it for possible later
830 reuse. First three arguments are same as in preceding function. */
831
832 rtx
833 assign_stack_temp (mode, size, keep)
834 enum machine_mode mode;
835 HOST_WIDE_INT size;
836 int keep;
837 {
838 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
839 }
840 \f
841 /* Assign a temporary of given TYPE.
842 KEEP is as for assign_stack_temp.
843 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
844 it is 0 if a register is OK.
845 DONT_PROMOTE is 1 if we should not promote values in register
846 to wider modes. */
847
848 rtx
849 assign_temp (type, keep, memory_required, dont_promote)
850 tree type;
851 int keep;
852 int memory_required;
853 int dont_promote ATTRIBUTE_UNUSED;
854 {
855 enum machine_mode mode = TYPE_MODE (type);
856 #ifndef PROMOTE_FOR_CALL_ONLY
857 int unsignedp = TREE_UNSIGNED (type);
858 #endif
859
860 if (mode == BLKmode || memory_required)
861 {
862 HOST_WIDE_INT size = int_size_in_bytes (type);
863 rtx tmp;
864
865 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
866 problems with allocating the stack space. */
867 if (size == 0)
868 size = 1;
869
870 /* Unfortunately, we don't yet know how to allocate variable-sized
871 temporaries. However, sometimes we have a fixed upper limit on
872 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
873 instead. This is the case for Chill variable-sized strings. */
874 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
875 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
876 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
877 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
878
879 tmp = assign_stack_temp_for_type (mode, size, keep, type);
880 return tmp;
881 }
882
883 #ifndef PROMOTE_FOR_CALL_ONLY
884 if (! dont_promote)
885 mode = promote_mode (type, mode, &unsignedp, 0);
886 #endif
887
888 return gen_reg_rtx (mode);
889 }
890 \f
891 /* Combine temporary stack slots which are adjacent on the stack.
892
893 This allows for better use of already allocated stack space. This is only
894 done for BLKmode slots because we can be sure that we won't have alignment
895 problems in this case. */
896
897 void
898 combine_temp_slots ()
899 {
900 struct temp_slot *p, *q;
901 struct temp_slot *prev_p, *prev_q;
902 int num_slots;
903
904 /* We can't combine slots, because the information about which slot
905 is in which alias set will be lost. */
906 if (flag_strict_aliasing)
907 return;
908
909 /* If there are a lot of temp slots, don't do anything unless
910 high levels of optimizaton. */
911 if (! flag_expensive_optimizations)
912 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
913 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
914 return;
915
916 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
917 {
918 int delete_p = 0;
919
920 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
921 for (q = p->next, prev_q = p; q; q = prev_q->next)
922 {
923 int delete_q = 0;
924 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
925 {
926 if (p->base_offset + p->full_size == q->base_offset)
927 {
928 /* Q comes after P; combine Q into P. */
929 p->size += q->size;
930 p->full_size += q->full_size;
931 delete_q = 1;
932 }
933 else if (q->base_offset + q->full_size == p->base_offset)
934 {
935 /* P comes after Q; combine P into Q. */
936 q->size += p->size;
937 q->full_size += p->full_size;
938 delete_p = 1;
939 break;
940 }
941 }
942 /* Either delete Q or advance past it. */
943 if (delete_q)
944 {
945 prev_q->next = q->next;
946 free (q);
947 }
948 else
949 prev_q = q;
950 }
951 /* Either delete P or advance past it. */
952 if (delete_p)
953 {
954 if (prev_p)
955 prev_p->next = p->next;
956 else
957 temp_slots = p->next;
958 }
959 else
960 prev_p = p;
961 }
962 }
963 \f
964 /* Find the temp slot corresponding to the object at address X. */
965
966 static struct temp_slot *
967 find_temp_slot_from_address (x)
968 rtx x;
969 {
970 struct temp_slot *p;
971 rtx next;
972
973 for (p = temp_slots; p; p = p->next)
974 {
975 if (! p->in_use)
976 continue;
977
978 else if (XEXP (p->slot, 0) == x
979 || p->address == x
980 || (GET_CODE (x) == PLUS
981 && XEXP (x, 0) == virtual_stack_vars_rtx
982 && GET_CODE (XEXP (x, 1)) == CONST_INT
983 && INTVAL (XEXP (x, 1)) >= p->base_offset
984 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
985 return p;
986
987 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
988 for (next = p->address; next; next = XEXP (next, 1))
989 if (XEXP (next, 0) == x)
990 return p;
991 }
992
993 /* If we have a sum involving a register, see if it points to a temp
994 slot. */
995 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
996 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
997 return p;
998 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
999 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1000 return p;
1001
1002 return 0;
1003 }
1004
1005 /* Indicate that NEW is an alternate way of referring to the temp slot
1006 that previously was known by OLD. */
1007
1008 void
1009 update_temp_slot_address (old, new)
1010 rtx old, new;
1011 {
1012 struct temp_slot *p;
1013
1014 if (rtx_equal_p (old, new))
1015 return;
1016
1017 p = find_temp_slot_from_address (old);
1018
1019 /* If we didn't find one, see if both OLD is a PLUS. If so, and NEW
1020 is a register, see if one operand of the PLUS is a temporary
1021 location. If so, NEW points into it. Otherwise, if both OLD and
1022 NEW are a PLUS and if there is a register in common between them.
1023 If so, try a recursive call on those values. */
1024 if (p == 0)
1025 {
1026 if (GET_CODE (old) != PLUS)
1027 return;
1028
1029 if (GET_CODE (new) == REG)
1030 {
1031 update_temp_slot_address (XEXP (old, 0), new);
1032 update_temp_slot_address (XEXP (old, 1), new);
1033 return;
1034 }
1035 else if (GET_CODE (new) != PLUS)
1036 return;
1037
1038 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1039 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1040 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1041 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1042 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1043 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1044 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1045 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1046
1047 return;
1048 }
1049
1050 /* Otherwise add an alias for the temp's address. */
1051 else if (p->address == 0)
1052 p->address = new;
1053 else
1054 {
1055 if (GET_CODE (p->address) != EXPR_LIST)
1056 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1057
1058 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1059 }
1060 }
1061
1062 /* If X could be a reference to a temporary slot, mark the fact that its
1063 address was taken. */
1064
1065 void
1066 mark_temp_addr_taken (x)
1067 rtx x;
1068 {
1069 struct temp_slot *p;
1070
1071 if (x == 0)
1072 return;
1073
1074 /* If X is not in memory or is at a constant address, it cannot be in
1075 a temporary slot. */
1076 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1077 return;
1078
1079 p = find_temp_slot_from_address (XEXP (x, 0));
1080 if (p != 0)
1081 p->addr_taken = 1;
1082 }
1083
1084 /* If X could be a reference to a temporary slot, mark that slot as
1085 belonging to the to one level higher than the current level. If X
1086 matched one of our slots, just mark that one. Otherwise, we can't
1087 easily predict which it is, so upgrade all of them. Kept slots
1088 need not be touched.
1089
1090 This is called when an ({...}) construct occurs and a statement
1091 returns a value in memory. */
1092
1093 void
1094 preserve_temp_slots (x)
1095 rtx x;
1096 {
1097 struct temp_slot *p = 0;
1098
1099 /* If there is no result, we still might have some objects whose address
1100 were taken, so we need to make sure they stay around. */
1101 if (x == 0)
1102 {
1103 for (p = temp_slots; p; p = p->next)
1104 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1105 p->level--;
1106
1107 return;
1108 }
1109
1110 /* If X is a register that is being used as a pointer, see if we have
1111 a temporary slot we know it points to. To be consistent with
1112 the code below, we really should preserve all non-kept slots
1113 if we can't find a match, but that seems to be much too costly. */
1114 if (GET_CODE (x) == REG && REG_POINTER (x))
1115 p = find_temp_slot_from_address (x);
1116
1117 /* If X is not in memory or is at a constant address, it cannot be in
1118 a temporary slot, but it can contain something whose address was
1119 taken. */
1120 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1121 {
1122 for (p = temp_slots; p; p = p->next)
1123 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1124 p->level--;
1125
1126 return;
1127 }
1128
1129 /* First see if we can find a match. */
1130 if (p == 0)
1131 p = find_temp_slot_from_address (XEXP (x, 0));
1132
1133 if (p != 0)
1134 {
1135 /* Move everything at our level whose address was taken to our new
1136 level in case we used its address. */
1137 struct temp_slot *q;
1138
1139 if (p->level == temp_slot_level)
1140 {
1141 for (q = temp_slots; q; q = q->next)
1142 if (q != p && q->addr_taken && q->level == p->level)
1143 q->level--;
1144
1145 p->level--;
1146 p->addr_taken = 0;
1147 }
1148 return;
1149 }
1150
1151 /* Otherwise, preserve all non-kept slots at this level. */
1152 for (p = temp_slots; p; p = p->next)
1153 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1154 p->level--;
1155 }
1156
1157 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1158 with that RTL_EXPR, promote it into a temporary slot at the present
1159 level so it will not be freed when we free slots made in the
1160 RTL_EXPR. */
1161
1162 void
1163 preserve_rtl_expr_result (x)
1164 rtx x;
1165 {
1166 struct temp_slot *p;
1167
1168 /* If X is not in memory or is at a constant address, it cannot be in
1169 a temporary slot. */
1170 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1171 return;
1172
1173 /* If we can find a match, move it to our level unless it is already at
1174 an upper level. */
1175 p = find_temp_slot_from_address (XEXP (x, 0));
1176 if (p != 0)
1177 {
1178 p->level = MIN (p->level, temp_slot_level);
1179 p->rtl_expr = 0;
1180 }
1181
1182 return;
1183 }
1184
1185 /* Free all temporaries used so far. This is normally called at the end
1186 of generating code for a statement. Don't free any temporaries
1187 currently in use for an RTL_EXPR that hasn't yet been emitted.
1188 We could eventually do better than this since it can be reused while
1189 generating the same RTL_EXPR, but this is complex and probably not
1190 worthwhile. */
1191
1192 void
1193 free_temp_slots ()
1194 {
1195 struct temp_slot *p;
1196
1197 for (p = temp_slots; p; p = p->next)
1198 if (p->in_use && p->level == temp_slot_level && ! p->keep
1199 && p->rtl_expr == 0)
1200 p->in_use = 0;
1201
1202 combine_temp_slots ();
1203 }
1204
1205 /* Free all temporary slots used in T, an RTL_EXPR node. */
1206
1207 void
1208 free_temps_for_rtl_expr (t)
1209 tree t;
1210 {
1211 struct temp_slot *p;
1212
1213 for (p = temp_slots; p; p = p->next)
1214 if (p->rtl_expr == t)
1215 {
1216 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1217 needs to be preserved. This can happen if a temporary in
1218 the RTL_EXPR was addressed; preserve_temp_slots will move
1219 the temporary into a higher level. */
1220 if (temp_slot_level <= p->level)
1221 p->in_use = 0;
1222 else
1223 p->rtl_expr = NULL_TREE;
1224 }
1225
1226 combine_temp_slots ();
1227 }
1228
1229 /* Mark all temporaries ever allocated in this function as not suitable
1230 for reuse until the current level is exited. */
1231
1232 void
1233 mark_all_temps_used ()
1234 {
1235 struct temp_slot *p;
1236
1237 for (p = temp_slots; p; p = p->next)
1238 {
1239 p->in_use = p->keep = 1;
1240 p->level = MIN (p->level, temp_slot_level);
1241 }
1242 }
1243
1244 /* Push deeper into the nesting level for stack temporaries. */
1245
1246 void
1247 push_temp_slots ()
1248 {
1249 temp_slot_level++;
1250 }
1251
1252 /* Likewise, but save the new level as the place to allocate variables
1253 for blocks. */
1254
1255 #if 0
1256 void
1257 push_temp_slots_for_block ()
1258 {
1259 push_temp_slots ();
1260
1261 var_temp_slot_level = temp_slot_level;
1262 }
1263
1264 /* Likewise, but save the new level as the place to allocate temporaries
1265 for TARGET_EXPRs. */
1266
1267 void
1268 push_temp_slots_for_target ()
1269 {
1270 push_temp_slots ();
1271
1272 target_temp_slot_level = temp_slot_level;
1273 }
1274
1275 /* Set and get the value of target_temp_slot_level. The only
1276 permitted use of these functions is to save and restore this value. */
1277
1278 int
1279 get_target_temp_slot_level ()
1280 {
1281 return target_temp_slot_level;
1282 }
1283
1284 void
1285 set_target_temp_slot_level (level)
1286 int level;
1287 {
1288 target_temp_slot_level = level;
1289 }
1290 #endif
1291
1292 /* Pop a temporary nesting level. All slots in use in the current level
1293 are freed. */
1294
1295 void
1296 pop_temp_slots ()
1297 {
1298 struct temp_slot *p;
1299
1300 for (p = temp_slots; p; p = p->next)
1301 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1302 p->in_use = 0;
1303
1304 combine_temp_slots ();
1305
1306 temp_slot_level--;
1307 }
1308
1309 /* Initialize temporary slots. */
1310
1311 void
1312 init_temp_slots ()
1313 {
1314 /* We have not allocated any temporaries yet. */
1315 temp_slots = 0;
1316 temp_slot_level = 0;
1317 var_temp_slot_level = 0;
1318 target_temp_slot_level = 0;
1319 }
1320 \f
1321 /* Retroactively move an auto variable from a register to a stack slot.
1322 This is done when an address-reference to the variable is seen. */
1323
1324 void
1325 put_var_into_stack (decl)
1326 tree decl;
1327 {
1328 register rtx reg;
1329 enum machine_mode promoted_mode, decl_mode;
1330 struct function *function = 0;
1331 tree context;
1332 int can_use_addressof;
1333 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1334 int usedp = (TREE_USED (decl)
1335 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1336
1337 context = decl_function_context (decl);
1338
1339 /* Get the current rtl used for this object and its original mode. */
1340 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1341
1342 /* No need to do anything if decl has no rtx yet
1343 since in that case caller is setting TREE_ADDRESSABLE
1344 and a stack slot will be assigned when the rtl is made. */
1345 if (reg == 0)
1346 return;
1347
1348 /* Get the declared mode for this object. */
1349 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1350 : DECL_MODE (decl));
1351 /* Get the mode it's actually stored in. */
1352 promoted_mode = GET_MODE (reg);
1353
1354 /* If this variable comes from an outer function,
1355 find that function's saved context. */
1356 if (context != current_function_decl && context != inline_function_decl)
1357 for (function = outer_function_chain; function; function = function->next)
1358 if (function->decl == context)
1359 break;
1360
1361 /* If this is a variable-size object with a pseudo to address it,
1362 put that pseudo into the stack, if the var is nonlocal. */
1363 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1364 && GET_CODE (reg) == MEM
1365 && GET_CODE (XEXP (reg, 0)) == REG
1366 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1367 {
1368 reg = XEXP (reg, 0);
1369 decl_mode = promoted_mode = GET_MODE (reg);
1370 }
1371
1372 can_use_addressof
1373 = (function == 0
1374 && optimize > 0
1375 /* FIXME make it work for promoted modes too */
1376 && decl_mode == promoted_mode
1377 #ifdef NON_SAVING_SETJMP
1378 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1379 #endif
1380 );
1381
1382 /* If we can't use ADDRESSOF, make sure we see through one we already
1383 generated. */
1384 if (! can_use_addressof && GET_CODE (reg) == MEM
1385 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1386 reg = XEXP (XEXP (reg, 0), 0);
1387
1388 /* Now we should have a value that resides in one or more pseudo regs. */
1389
1390 if (GET_CODE (reg) == REG)
1391 {
1392 /* If this variable lives in the current function and we don't need
1393 to put things in the stack for the sake of setjmp, try to keep it
1394 in a register until we know we actually need the address. */
1395 if (can_use_addressof)
1396 gen_mem_addressof (reg, decl);
1397 else
1398 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1399 decl_mode, volatilep, 0, usedp, 0);
1400 }
1401 else if (GET_CODE (reg) == CONCAT)
1402 {
1403 /* A CONCAT contains two pseudos; put them both in the stack.
1404 We do it so they end up consecutive.
1405 We fixup references to the parts only after we fixup references
1406 to the whole CONCAT, lest we do double fixups for the latter
1407 references. */
1408 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1409 tree part_type = type_for_mode (part_mode, 0);
1410 rtx lopart = XEXP (reg, 0);
1411 rtx hipart = XEXP (reg, 1);
1412 #ifdef FRAME_GROWS_DOWNWARD
1413 /* Since part 0 should have a lower address, do it second. */
1414 put_reg_into_stack (function, hipart, part_type, part_mode,
1415 part_mode, volatilep, 0, 0, 0);
1416 put_reg_into_stack (function, lopart, part_type, part_mode,
1417 part_mode, volatilep, 0, 0, 0);
1418 #else
1419 put_reg_into_stack (function, lopart, part_type, part_mode,
1420 part_mode, volatilep, 0, 0, 0);
1421 put_reg_into_stack (function, hipart, part_type, part_mode,
1422 part_mode, volatilep, 0, 0, 0);
1423 #endif
1424
1425 /* Change the CONCAT into a combined MEM for both parts. */
1426 PUT_CODE (reg, MEM);
1427 set_mem_attributes (reg, decl, 1);
1428
1429 /* The two parts are in memory order already.
1430 Use the lower parts address as ours. */
1431 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1432 /* Prevent sharing of rtl that might lose. */
1433 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1434 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1435 if (usedp)
1436 {
1437 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1438 promoted_mode, 0);
1439 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1440 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1441 }
1442 }
1443 else
1444 return;
1445
1446 if (current_function_check_memory_usage)
1447 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
1448 3, XEXP (reg, 0), Pmode,
1449 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1450 TYPE_MODE (sizetype),
1451 GEN_INT (MEMORY_USE_RW),
1452 TYPE_MODE (integer_type_node));
1453 }
1454
1455 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1456 into the stack frame of FUNCTION (0 means the current function).
1457 DECL_MODE is the machine mode of the user-level data type.
1458 PROMOTED_MODE is the machine mode of the register.
1459 VOLATILE_P is nonzero if this is for a "volatile" decl.
1460 USED_P is nonzero if this reg might have already been used in an insn. */
1461
1462 static void
1463 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1464 original_regno, used_p, ht)
1465 struct function *function;
1466 rtx reg;
1467 tree type;
1468 enum machine_mode promoted_mode, decl_mode;
1469 int volatile_p;
1470 unsigned int original_regno;
1471 int used_p;
1472 struct hash_table *ht;
1473 {
1474 struct function *func = function ? function : cfun;
1475 rtx new = 0;
1476 unsigned int regno = original_regno;
1477
1478 if (regno == 0)
1479 regno = REGNO (reg);
1480
1481 if (regno < func->x_max_parm_reg)
1482 new = func->x_parm_reg_stack_loc[regno];
1483
1484 if (new == 0)
1485 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1486
1487 PUT_CODE (reg, MEM);
1488 PUT_MODE (reg, decl_mode);
1489 XEXP (reg, 0) = XEXP (new, 0);
1490 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1491 MEM_VOLATILE_P (reg) = volatile_p;
1492
1493 /* If this is a memory ref that contains aggregate components,
1494 mark it as such for cse and loop optimize. If we are reusing a
1495 previously generated stack slot, then we need to copy the bit in
1496 case it was set for other reasons. For instance, it is set for
1497 __builtin_va_alist. */
1498 if (type)
1499 {
1500 MEM_SET_IN_STRUCT_P (reg,
1501 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1502 MEM_ALIAS_SET (reg) = get_alias_set (type);
1503 }
1504 if (used_p)
1505 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1506 }
1507
1508 /* Make sure that all refs to the variable, previously made
1509 when it was a register, are fixed up to be valid again.
1510 See function above for meaning of arguments. */
1511
1512 static void
1513 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1514 struct function *function;
1515 rtx reg;
1516 tree type;
1517 enum machine_mode promoted_mode;
1518 struct hash_table *ht;
1519 {
1520 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1521
1522 if (function != 0)
1523 {
1524 struct var_refs_queue *temp;
1525
1526 temp
1527 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1528 temp->modified = reg;
1529 temp->promoted_mode = promoted_mode;
1530 temp->unsignedp = unsigned_p;
1531 temp->next = function->fixup_var_refs_queue;
1532 function->fixup_var_refs_queue = temp;
1533 }
1534 else
1535 /* Variable is local; fix it up now. */
1536 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1537 }
1538 \f
1539 static void
1540 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1541 rtx var;
1542 enum machine_mode promoted_mode;
1543 int unsignedp;
1544 struct hash_table *ht;
1545 {
1546 tree pending;
1547 rtx first_insn = get_insns ();
1548 struct sequence_stack *stack = seq_stack;
1549 tree rtl_exps = rtl_expr_chain;
1550
1551 /* If there's a hash table, it must record all uses of VAR. */
1552 if (ht)
1553 {
1554 if (stack != 0)
1555 abort ();
1556 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp);
1557 return;
1558 }
1559
1560 fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
1561 stack == 0);
1562
1563 /* Scan all pending sequences too. */
1564 for (; stack; stack = stack->next)
1565 {
1566 push_to_full_sequence (stack->first, stack->last);
1567 fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
1568 stack->next != 0);
1569 /* Update remembered end of sequence
1570 in case we added an insn at the end. */
1571 stack->last = get_last_insn ();
1572 end_sequence ();
1573 }
1574
1575 /* Scan all waiting RTL_EXPRs too. */
1576 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1577 {
1578 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1579 if (seq != const0_rtx && seq != 0)
1580 {
1581 push_to_sequence (seq);
1582 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1583 end_sequence ();
1584 }
1585 }
1586
1587 /* Scan the catch clauses for exception handling too. */
1588 push_to_full_sequence (catch_clauses, catch_clauses_last);
1589 fixup_var_refs_insns (catch_clauses, var, promoted_mode, unsignedp, 0);
1590 end_full_sequence (&catch_clauses, &catch_clauses_last);
1591 }
1592 \f
1593 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1594 some part of an insn. Return a struct fixup_replacement whose OLD
1595 value is equal to X. Allocate a new structure if no such entry exists. */
1596
1597 static struct fixup_replacement *
1598 find_fixup_replacement (replacements, x)
1599 struct fixup_replacement **replacements;
1600 rtx x;
1601 {
1602 struct fixup_replacement *p;
1603
1604 /* See if we have already replaced this. */
1605 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1606 ;
1607
1608 if (p == 0)
1609 {
1610 p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1611 p->old = x;
1612 p->new = 0;
1613 p->next = *replacements;
1614 *replacements = p;
1615 }
1616
1617 return p;
1618 }
1619
1620 /* Scan the insn-chain starting with INSN for refs to VAR
1621 and fix them up. TOPLEVEL is nonzero if this chain is the
1622 main chain of insns for the current function. */
1623
1624 static void
1625 fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel)
1626 rtx insn;
1627 rtx var;
1628 enum machine_mode promoted_mode;
1629 int unsignedp;
1630 int toplevel;
1631 {
1632 while (insn)
1633 {
1634 /* fixup_var_refs_insn might modify insn, so save its next
1635 pointer now. */
1636 rtx next = NEXT_INSN (insn);
1637
1638 /* CALL_PLACEHOLDERs are special; we have to switch into each of
1639 the three sequences they (potentially) contain, and process
1640 them recursively. The CALL_INSN itself is not interesting. */
1641
1642 if (GET_CODE (insn) == CALL_INSN
1643 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1644 {
1645 int i;
1646
1647 /* Look at the Normal call, sibling call and tail recursion
1648 sequences attached to the CALL_PLACEHOLDER. */
1649 for (i = 0; i < 3; i++)
1650 {
1651 rtx seq = XEXP (PATTERN (insn), i);
1652 if (seq)
1653 {
1654 push_to_sequence (seq);
1655 fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0);
1656 XEXP (PATTERN (insn), i) = get_insns ();
1657 end_sequence ();
1658 }
1659 }
1660 }
1661
1662 else if (INSN_P (insn))
1663 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel);
1664
1665 insn = next;
1666 }
1667 }
1668
1669 /* Look up the insns which reference VAR in HT and fix them up. Other
1670 arguments are the same as fixup_var_refs_insns.
1671
1672 N.B. No need for special processing of CALL_PLACEHOLDERs here,
1673 because the hash table will point straight to the interesting insn
1674 (inside the CALL_PLACEHOLDER). */
1675 static void
1676 fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
1677 struct hash_table *ht;
1678 rtx var;
1679 enum machine_mode promoted_mode;
1680 int unsignedp;
1681 {
1682 struct insns_for_mem_entry *ime = (struct insns_for_mem_entry *)
1683 hash_lookup (ht, var, /*create=*/0, /*copy=*/0);
1684 rtx insn_list = ime->insns;
1685
1686 while (insn_list)
1687 {
1688 rtx insn = XEXP (insn_list, 0);
1689
1690 if (INSN_P (insn))
1691 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, 0);
1692
1693 insn_list = XEXP (insn_list, 1);
1694 }
1695 }
1696
1697
1698 /* Per-insn processing by fixup_var_refs_insns(_with_hash). INSN is
1699 the insn under examination, VAR is the variable to fix up
1700 references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
1701 TOPLEVEL is nonzero if this is the main insn chain for this
1702 function. */
1703 static void
1704 fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
1705 rtx insn;
1706 rtx var;
1707 enum machine_mode promoted_mode;
1708 int unsignedp;
1709 int toplevel;
1710 {
1711 rtx call_dest = 0;
1712 rtx set, prev, prev_set;
1713 rtx note;
1714
1715 /* Remember the notes in case we delete the insn. */
1716 note = REG_NOTES (insn);
1717
1718 /* If this is a CLOBBER of VAR, delete it.
1719
1720 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1721 and REG_RETVAL notes too. */
1722 if (GET_CODE (PATTERN (insn)) == CLOBBER
1723 && (XEXP (PATTERN (insn), 0) == var
1724 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1725 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1726 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1727 {
1728 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1729 /* The REG_LIBCALL note will go away since we are going to
1730 turn INSN into a NOTE, so just delete the
1731 corresponding REG_RETVAL note. */
1732 remove_note (XEXP (note, 0),
1733 find_reg_note (XEXP (note, 0), REG_RETVAL,
1734 NULL_RTX));
1735
1736 /* In unoptimized compilation, we shouldn't call delete_insn
1737 except in jump.c doing warnings. */
1738 PUT_CODE (insn, NOTE);
1739 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1740 NOTE_SOURCE_FILE (insn) = 0;
1741 }
1742
1743 /* The insn to load VAR from a home in the arglist
1744 is now a no-op. When we see it, just delete it.
1745 Similarly if this is storing VAR from a register from which
1746 it was loaded in the previous insn. This will occur
1747 when an ADDRESSOF was made for an arglist slot. */
1748 else if (toplevel
1749 && (set = single_set (insn)) != 0
1750 && SET_DEST (set) == var
1751 /* If this represents the result of an insn group,
1752 don't delete the insn. */
1753 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1754 && (rtx_equal_p (SET_SRC (set), var)
1755 || (GET_CODE (SET_SRC (set)) == REG
1756 && (prev = prev_nonnote_insn (insn)) != 0
1757 && (prev_set = single_set (prev)) != 0
1758 && SET_DEST (prev_set) == SET_SRC (set)
1759 && rtx_equal_p (SET_SRC (prev_set), var))))
1760 {
1761 /* In unoptimized compilation, we shouldn't call delete_insn
1762 except in jump.c doing warnings. */
1763 PUT_CODE (insn, NOTE);
1764 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1765 NOTE_SOURCE_FILE (insn) = 0;
1766 }
1767 else
1768 {
1769 struct fixup_replacement *replacements = 0;
1770 rtx next_insn = NEXT_INSN (insn);
1771
1772 if (SMALL_REGISTER_CLASSES)
1773 {
1774 /* If the insn that copies the results of a CALL_INSN
1775 into a pseudo now references VAR, we have to use an
1776 intermediate pseudo since we want the life of the
1777 return value register to be only a single insn.
1778
1779 If we don't use an intermediate pseudo, such things as
1780 address computations to make the address of VAR valid
1781 if it is not can be placed between the CALL_INSN and INSN.
1782
1783 To make sure this doesn't happen, we record the destination
1784 of the CALL_INSN and see if the next insn uses both that
1785 and VAR. */
1786
1787 if (call_dest != 0 && GET_CODE (insn) == INSN
1788 && reg_mentioned_p (var, PATTERN (insn))
1789 && reg_mentioned_p (call_dest, PATTERN (insn)))
1790 {
1791 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1792
1793 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1794
1795 PATTERN (insn) = replace_rtx (PATTERN (insn),
1796 call_dest, temp);
1797 }
1798
1799 if (GET_CODE (insn) == CALL_INSN
1800 && GET_CODE (PATTERN (insn)) == SET)
1801 call_dest = SET_DEST (PATTERN (insn));
1802 else if (GET_CODE (insn) == CALL_INSN
1803 && GET_CODE (PATTERN (insn)) == PARALLEL
1804 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1805 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1806 else
1807 call_dest = 0;
1808 }
1809
1810 /* See if we have to do anything to INSN now that VAR is in
1811 memory. If it needs to be loaded into a pseudo, use a single
1812 pseudo for the entire insn in case there is a MATCH_DUP
1813 between two operands. We pass a pointer to the head of
1814 a list of struct fixup_replacements. If fixup_var_refs_1
1815 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1816 it will record them in this list.
1817
1818 If it allocated a pseudo for any replacement, we copy into
1819 it here. */
1820
1821 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1822 &replacements);
1823
1824 /* If this is last_parm_insn, and any instructions were output
1825 after it to fix it up, then we must set last_parm_insn to
1826 the last such instruction emitted. */
1827 if (insn == last_parm_insn)
1828 last_parm_insn = PREV_INSN (next_insn);
1829
1830 while (replacements)
1831 {
1832 struct fixup_replacement *next;
1833
1834 if (GET_CODE (replacements->new) == REG)
1835 {
1836 rtx insert_before;
1837 rtx seq;
1838
1839 /* OLD might be a (subreg (mem)). */
1840 if (GET_CODE (replacements->old) == SUBREG)
1841 replacements->old
1842 = fixup_memory_subreg (replacements->old, insn, 0);
1843 else
1844 replacements->old
1845 = fixup_stack_1 (replacements->old, insn);
1846
1847 insert_before = insn;
1848
1849 /* If we are changing the mode, do a conversion.
1850 This might be wasteful, but combine.c will
1851 eliminate much of the waste. */
1852
1853 if (GET_MODE (replacements->new)
1854 != GET_MODE (replacements->old))
1855 {
1856 start_sequence ();
1857 convert_move (replacements->new,
1858 replacements->old, unsignedp);
1859 seq = gen_sequence ();
1860 end_sequence ();
1861 }
1862 else
1863 seq = gen_move_insn (replacements->new,
1864 replacements->old);
1865
1866 emit_insn_before (seq, insert_before);
1867 }
1868
1869 next = replacements->next;
1870 free (replacements);
1871 replacements = next;
1872 }
1873 }
1874
1875 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1876 But don't touch other insns referred to by reg-notes;
1877 we will get them elsewhere. */
1878 while (note)
1879 {
1880 if (GET_CODE (note) != INSN_LIST)
1881 XEXP (note, 0)
1882 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1883 note = XEXP (note, 1);
1884 }
1885 }
1886 \f
1887 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1888 See if the rtx expression at *LOC in INSN needs to be changed.
1889
1890 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1891 contain a list of original rtx's and replacements. If we find that we need
1892 to modify this insn by replacing a memory reference with a pseudo or by
1893 making a new MEM to implement a SUBREG, we consult that list to see if
1894 we have already chosen a replacement. If none has already been allocated,
1895 we allocate it and update the list. fixup_var_refs_insn will copy VAR
1896 or the SUBREG, as appropriate, to the pseudo. */
1897
1898 static void
1899 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1900 register rtx var;
1901 enum machine_mode promoted_mode;
1902 register rtx *loc;
1903 rtx insn;
1904 struct fixup_replacement **replacements;
1905 {
1906 register int i;
1907 register rtx x = *loc;
1908 RTX_CODE code = GET_CODE (x);
1909 register const char *fmt;
1910 register rtx tem, tem1;
1911 struct fixup_replacement *replacement;
1912
1913 switch (code)
1914 {
1915 case ADDRESSOF:
1916 if (XEXP (x, 0) == var)
1917 {
1918 /* Prevent sharing of rtl that might lose. */
1919 rtx sub = copy_rtx (XEXP (var, 0));
1920
1921 if (! validate_change (insn, loc, sub, 0))
1922 {
1923 rtx y = gen_reg_rtx (GET_MODE (sub));
1924 rtx seq, new_insn;
1925
1926 /* We should be able to replace with a register or all is lost.
1927 Note that we can't use validate_change to verify this, since
1928 we're not caring for replacing all dups simultaneously. */
1929 if (! validate_replace_rtx (*loc, y, insn))
1930 abort ();
1931
1932 /* Careful! First try to recognize a direct move of the
1933 value, mimicking how things are done in gen_reload wrt
1934 PLUS. Consider what happens when insn is a conditional
1935 move instruction and addsi3 clobbers flags. */
1936
1937 start_sequence ();
1938 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1939 seq = gen_sequence ();
1940 end_sequence ();
1941
1942 if (recog_memoized (new_insn) < 0)
1943 {
1944 /* That failed. Fall back on force_operand and hope. */
1945
1946 start_sequence ();
1947 sub = force_operand (sub, y);
1948 if (sub != y)
1949 emit_insn (gen_move_insn (y, sub));
1950 seq = gen_sequence ();
1951 end_sequence ();
1952 }
1953
1954 #ifdef HAVE_cc0
1955 /* Don't separate setter from user. */
1956 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1957 insn = PREV_INSN (insn);
1958 #endif
1959
1960 emit_insn_before (seq, insn);
1961 }
1962 }
1963 return;
1964
1965 case MEM:
1966 if (var == x)
1967 {
1968 /* If we already have a replacement, use it. Otherwise,
1969 try to fix up this address in case it is invalid. */
1970
1971 replacement = find_fixup_replacement (replacements, var);
1972 if (replacement->new)
1973 {
1974 *loc = replacement->new;
1975 return;
1976 }
1977
1978 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1979
1980 /* Unless we are forcing memory to register or we changed the mode,
1981 we can leave things the way they are if the insn is valid. */
1982
1983 INSN_CODE (insn) = -1;
1984 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1985 && recog_memoized (insn) >= 0)
1986 return;
1987
1988 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1989 return;
1990 }
1991
1992 /* If X contains VAR, we need to unshare it here so that we update
1993 each occurrence separately. But all identical MEMs in one insn
1994 must be replaced with the same rtx because of the possibility of
1995 MATCH_DUPs. */
1996
1997 if (reg_mentioned_p (var, x))
1998 {
1999 replacement = find_fixup_replacement (replacements, x);
2000 if (replacement->new == 0)
2001 replacement->new = copy_most_rtx (x, var);
2002
2003 *loc = x = replacement->new;
2004 code = GET_CODE (x);
2005 }
2006 break;
2007
2008 case REG:
2009 case CC0:
2010 case PC:
2011 case CONST_INT:
2012 case CONST:
2013 case SYMBOL_REF:
2014 case LABEL_REF:
2015 case CONST_DOUBLE:
2016 return;
2017
2018 case SIGN_EXTRACT:
2019 case ZERO_EXTRACT:
2020 /* Note that in some cases those types of expressions are altered
2021 by optimize_bit_field, and do not survive to get here. */
2022 if (XEXP (x, 0) == var
2023 || (GET_CODE (XEXP (x, 0)) == SUBREG
2024 && SUBREG_REG (XEXP (x, 0)) == var))
2025 {
2026 /* Get TEM as a valid MEM in the mode presently in the insn.
2027
2028 We don't worry about the possibility of MATCH_DUP here; it
2029 is highly unlikely and would be tricky to handle. */
2030
2031 tem = XEXP (x, 0);
2032 if (GET_CODE (tem) == SUBREG)
2033 {
2034 if (GET_MODE_BITSIZE (GET_MODE (tem))
2035 > GET_MODE_BITSIZE (GET_MODE (var)))
2036 {
2037 replacement = find_fixup_replacement (replacements, var);
2038 if (replacement->new == 0)
2039 replacement->new = gen_reg_rtx (GET_MODE (var));
2040 SUBREG_REG (tem) = replacement->new;
2041
2042 /* The following code works only if we have a MEM, so we
2043 need to handle the subreg here. We directly substitute
2044 it assuming that a subreg must be OK here. We already
2045 scheduled a replacement to copy the mem into the
2046 subreg. */
2047 XEXP (x, 0) = tem;
2048 return;
2049 }
2050 else
2051 tem = fixup_memory_subreg (tem, insn, 0);
2052 }
2053 else
2054 tem = fixup_stack_1 (tem, insn);
2055
2056 /* Unless we want to load from memory, get TEM into the proper mode
2057 for an extract from memory. This can only be done if the
2058 extract is at a constant position and length. */
2059
2060 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2061 && GET_CODE (XEXP (x, 2)) == CONST_INT
2062 && ! mode_dependent_address_p (XEXP (tem, 0))
2063 && ! MEM_VOLATILE_P (tem))
2064 {
2065 enum machine_mode wanted_mode = VOIDmode;
2066 enum machine_mode is_mode = GET_MODE (tem);
2067 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2068
2069 #ifdef HAVE_extzv
2070 if (GET_CODE (x) == ZERO_EXTRACT)
2071 {
2072 wanted_mode
2073 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2074 if (wanted_mode == VOIDmode)
2075 wanted_mode = word_mode;
2076 }
2077 #endif
2078 #ifdef HAVE_extv
2079 if (GET_CODE (x) == SIGN_EXTRACT)
2080 {
2081 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2082 if (wanted_mode == VOIDmode)
2083 wanted_mode = word_mode;
2084 }
2085 #endif
2086 /* If we have a narrower mode, we can do something. */
2087 if (wanted_mode != VOIDmode
2088 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2089 {
2090 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2091 rtx old_pos = XEXP (x, 2);
2092 rtx newmem;
2093
2094 /* If the bytes and bits are counted differently, we
2095 must adjust the offset. */
2096 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2097 offset = (GET_MODE_SIZE (is_mode)
2098 - GET_MODE_SIZE (wanted_mode) - offset);
2099
2100 pos %= GET_MODE_BITSIZE (wanted_mode);
2101
2102 newmem = gen_rtx_MEM (wanted_mode,
2103 plus_constant (XEXP (tem, 0), offset));
2104 MEM_COPY_ATTRIBUTES (newmem, tem);
2105
2106 /* Make the change and see if the insn remains valid. */
2107 INSN_CODE (insn) = -1;
2108 XEXP (x, 0) = newmem;
2109 XEXP (x, 2) = GEN_INT (pos);
2110
2111 if (recog_memoized (insn) >= 0)
2112 return;
2113
2114 /* Otherwise, restore old position. XEXP (x, 0) will be
2115 restored later. */
2116 XEXP (x, 2) = old_pos;
2117 }
2118 }
2119
2120 /* If we get here, the bitfield extract insn can't accept a memory
2121 reference. Copy the input into a register. */
2122
2123 tem1 = gen_reg_rtx (GET_MODE (tem));
2124 emit_insn_before (gen_move_insn (tem1, tem), insn);
2125 XEXP (x, 0) = tem1;
2126 return;
2127 }
2128 break;
2129
2130 case SUBREG:
2131 if (SUBREG_REG (x) == var)
2132 {
2133 /* If this is a special SUBREG made because VAR was promoted
2134 from a wider mode, replace it with VAR and call ourself
2135 recursively, this time saying that the object previously
2136 had its current mode (by virtue of the SUBREG). */
2137
2138 if (SUBREG_PROMOTED_VAR_P (x))
2139 {
2140 *loc = var;
2141 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2142 return;
2143 }
2144
2145 /* If this SUBREG makes VAR wider, it has become a paradoxical
2146 SUBREG with VAR in memory, but these aren't allowed at this
2147 stage of the compilation. So load VAR into a pseudo and take
2148 a SUBREG of that pseudo. */
2149 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2150 {
2151 replacement = find_fixup_replacement (replacements, var);
2152 if (replacement->new == 0)
2153 replacement->new = gen_reg_rtx (GET_MODE (var));
2154 SUBREG_REG (x) = replacement->new;
2155 return;
2156 }
2157
2158 /* See if we have already found a replacement for this SUBREG.
2159 If so, use it. Otherwise, make a MEM and see if the insn
2160 is recognized. If not, or if we should force MEM into a register,
2161 make a pseudo for this SUBREG. */
2162 replacement = find_fixup_replacement (replacements, x);
2163 if (replacement->new)
2164 {
2165 *loc = replacement->new;
2166 return;
2167 }
2168
2169 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2170
2171 INSN_CODE (insn) = -1;
2172 if (! flag_force_mem && recog_memoized (insn) >= 0)
2173 return;
2174
2175 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2176 return;
2177 }
2178 break;
2179
2180 case SET:
2181 /* First do special simplification of bit-field references. */
2182 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2183 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2184 optimize_bit_field (x, insn, 0);
2185 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2186 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2187 optimize_bit_field (x, insn, NULL_PTR);
2188
2189 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2190 into a register and then store it back out. */
2191 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2192 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2193 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2194 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2195 > GET_MODE_SIZE (GET_MODE (var))))
2196 {
2197 replacement = find_fixup_replacement (replacements, var);
2198 if (replacement->new == 0)
2199 replacement->new = gen_reg_rtx (GET_MODE (var));
2200
2201 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2202 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2203 }
2204
2205 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2206 insn into a pseudo and store the low part of the pseudo into VAR. */
2207 if (GET_CODE (SET_DEST (x)) == SUBREG
2208 && SUBREG_REG (SET_DEST (x)) == var
2209 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2210 > GET_MODE_SIZE (GET_MODE (var))))
2211 {
2212 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2213 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2214 tem)),
2215 insn);
2216 break;
2217 }
2218
2219 {
2220 rtx dest = SET_DEST (x);
2221 rtx src = SET_SRC (x);
2222 #ifdef HAVE_insv
2223 rtx outerdest = dest;
2224 #endif
2225
2226 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2227 || GET_CODE (dest) == SIGN_EXTRACT
2228 || GET_CODE (dest) == ZERO_EXTRACT)
2229 dest = XEXP (dest, 0);
2230
2231 if (GET_CODE (src) == SUBREG)
2232 src = XEXP (src, 0);
2233
2234 /* If VAR does not appear at the top level of the SET
2235 just scan the lower levels of the tree. */
2236
2237 if (src != var && dest != var)
2238 break;
2239
2240 /* We will need to rerecognize this insn. */
2241 INSN_CODE (insn) = -1;
2242
2243 #ifdef HAVE_insv
2244 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2245 {
2246 /* Since this case will return, ensure we fixup all the
2247 operands here. */
2248 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2249 insn, replacements);
2250 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2251 insn, replacements);
2252 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2253 insn, replacements);
2254
2255 tem = XEXP (outerdest, 0);
2256
2257 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2258 that may appear inside a ZERO_EXTRACT.
2259 This was legitimate when the MEM was a REG. */
2260 if (GET_CODE (tem) == SUBREG
2261 && SUBREG_REG (tem) == var)
2262 tem = fixup_memory_subreg (tem, insn, 0);
2263 else
2264 tem = fixup_stack_1 (tem, insn);
2265
2266 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2267 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2268 && ! mode_dependent_address_p (XEXP (tem, 0))
2269 && ! MEM_VOLATILE_P (tem))
2270 {
2271 enum machine_mode wanted_mode;
2272 enum machine_mode is_mode = GET_MODE (tem);
2273 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2274
2275 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2276 if (wanted_mode == VOIDmode)
2277 wanted_mode = word_mode;
2278
2279 /* If we have a narrower mode, we can do something. */
2280 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2281 {
2282 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2283 rtx old_pos = XEXP (outerdest, 2);
2284 rtx newmem;
2285
2286 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2287 offset = (GET_MODE_SIZE (is_mode)
2288 - GET_MODE_SIZE (wanted_mode) - offset);
2289
2290 pos %= GET_MODE_BITSIZE (wanted_mode);
2291
2292 newmem = gen_rtx_MEM (wanted_mode,
2293 plus_constant (XEXP (tem, 0),
2294 offset));
2295 MEM_COPY_ATTRIBUTES (newmem, tem);
2296
2297 /* Make the change and see if the insn remains valid. */
2298 INSN_CODE (insn) = -1;
2299 XEXP (outerdest, 0) = newmem;
2300 XEXP (outerdest, 2) = GEN_INT (pos);
2301
2302 if (recog_memoized (insn) >= 0)
2303 return;
2304
2305 /* Otherwise, restore old position. XEXP (x, 0) will be
2306 restored later. */
2307 XEXP (outerdest, 2) = old_pos;
2308 }
2309 }
2310
2311 /* If we get here, the bit-field store doesn't allow memory
2312 or isn't located at a constant position. Load the value into
2313 a register, do the store, and put it back into memory. */
2314
2315 tem1 = gen_reg_rtx (GET_MODE (tem));
2316 emit_insn_before (gen_move_insn (tem1, tem), insn);
2317 emit_insn_after (gen_move_insn (tem, tem1), insn);
2318 XEXP (outerdest, 0) = tem1;
2319 return;
2320 }
2321 #endif
2322
2323 /* STRICT_LOW_PART is a no-op on memory references
2324 and it can cause combinations to be unrecognizable,
2325 so eliminate it. */
2326
2327 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2328 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2329
2330 /* A valid insn to copy VAR into or out of a register
2331 must be left alone, to avoid an infinite loop here.
2332 If the reference to VAR is by a subreg, fix that up,
2333 since SUBREG is not valid for a memref.
2334 Also fix up the address of the stack slot.
2335
2336 Note that we must not try to recognize the insn until
2337 after we know that we have valid addresses and no
2338 (subreg (mem ...) ...) constructs, since these interfere
2339 with determining the validity of the insn. */
2340
2341 if ((SET_SRC (x) == var
2342 || (GET_CODE (SET_SRC (x)) == SUBREG
2343 && SUBREG_REG (SET_SRC (x)) == var))
2344 && (GET_CODE (SET_DEST (x)) == REG
2345 || (GET_CODE (SET_DEST (x)) == SUBREG
2346 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2347 && GET_MODE (var) == promoted_mode
2348 && x == single_set (insn))
2349 {
2350 rtx pat, last;
2351
2352 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2353 if (replacement->new)
2354 SET_SRC (x) = replacement->new;
2355 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2356 SET_SRC (x) = replacement->new
2357 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2358 else
2359 SET_SRC (x) = replacement->new
2360 = fixup_stack_1 (SET_SRC (x), insn);
2361
2362 if (recog_memoized (insn) >= 0)
2363 return;
2364
2365 /* INSN is not valid, but we know that we want to
2366 copy SET_SRC (x) to SET_DEST (x) in some way. So
2367 we generate the move and see whether it requires more
2368 than one insn. If it does, we emit those insns and
2369 delete INSN. Otherwise, we an just replace the pattern
2370 of INSN; we have already verified above that INSN has
2371 no other function that to do X. */
2372
2373 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2374 if (GET_CODE (pat) == SEQUENCE)
2375 {
2376 last = emit_insn_before (pat, insn);
2377
2378 /* INSN might have REG_RETVAL or other important notes, so
2379 we need to store the pattern of the last insn in the
2380 sequence into INSN similarly to the normal case. LAST
2381 should not have REG_NOTES, but we allow them if INSN has
2382 no REG_NOTES. */
2383 if (REG_NOTES (last) && REG_NOTES (insn))
2384 abort ();
2385 if (REG_NOTES (last))
2386 REG_NOTES (insn) = REG_NOTES (last);
2387 PATTERN (insn) = PATTERN (last);
2388
2389 PUT_CODE (last, NOTE);
2390 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2391 NOTE_SOURCE_FILE (last) = 0;
2392 }
2393 else
2394 PATTERN (insn) = pat;
2395
2396 return;
2397 }
2398
2399 if ((SET_DEST (x) == var
2400 || (GET_CODE (SET_DEST (x)) == SUBREG
2401 && SUBREG_REG (SET_DEST (x)) == var))
2402 && (GET_CODE (SET_SRC (x)) == REG
2403 || (GET_CODE (SET_SRC (x)) == SUBREG
2404 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2405 && GET_MODE (var) == promoted_mode
2406 && x == single_set (insn))
2407 {
2408 rtx pat, last;
2409
2410 if (GET_CODE (SET_DEST (x)) == SUBREG)
2411 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2412 else
2413 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2414
2415 if (recog_memoized (insn) >= 0)
2416 return;
2417
2418 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2419 if (GET_CODE (pat) == SEQUENCE)
2420 {
2421 last = emit_insn_before (pat, insn);
2422
2423 /* INSN might have REG_RETVAL or other important notes, so
2424 we need to store the pattern of the last insn in the
2425 sequence into INSN similarly to the normal case. LAST
2426 should not have REG_NOTES, but we allow them if INSN has
2427 no REG_NOTES. */
2428 if (REG_NOTES (last) && REG_NOTES (insn))
2429 abort ();
2430 if (REG_NOTES (last))
2431 REG_NOTES (insn) = REG_NOTES (last);
2432 PATTERN (insn) = PATTERN (last);
2433
2434 PUT_CODE (last, NOTE);
2435 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2436 NOTE_SOURCE_FILE (last) = 0;
2437 }
2438 else
2439 PATTERN (insn) = pat;
2440
2441 return;
2442 }
2443
2444 /* Otherwise, storing into VAR must be handled specially
2445 by storing into a temporary and copying that into VAR
2446 with a new insn after this one. Note that this case
2447 will be used when storing into a promoted scalar since
2448 the insn will now have different modes on the input
2449 and output and hence will be invalid (except for the case
2450 of setting it to a constant, which does not need any
2451 change if it is valid). We generate extra code in that case,
2452 but combine.c will eliminate it. */
2453
2454 if (dest == var)
2455 {
2456 rtx temp;
2457 rtx fixeddest = SET_DEST (x);
2458
2459 /* STRICT_LOW_PART can be discarded, around a MEM. */
2460 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2461 fixeddest = XEXP (fixeddest, 0);
2462 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2463 if (GET_CODE (fixeddest) == SUBREG)
2464 {
2465 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2466 promoted_mode = GET_MODE (fixeddest);
2467 }
2468 else
2469 fixeddest = fixup_stack_1 (fixeddest, insn);
2470
2471 temp = gen_reg_rtx (promoted_mode);
2472
2473 emit_insn_after (gen_move_insn (fixeddest,
2474 gen_lowpart (GET_MODE (fixeddest),
2475 temp)),
2476 insn);
2477
2478 SET_DEST (x) = temp;
2479 }
2480 }
2481
2482 default:
2483 break;
2484 }
2485
2486 /* Nothing special about this RTX; fix its operands. */
2487
2488 fmt = GET_RTX_FORMAT (code);
2489 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2490 {
2491 if (fmt[i] == 'e')
2492 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2493 else if (fmt[i] == 'E')
2494 {
2495 register int j;
2496 for (j = 0; j < XVECLEN (x, i); j++)
2497 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2498 insn, replacements);
2499 }
2500 }
2501 }
2502 \f
2503 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2504 return an rtx (MEM:m1 newaddr) which is equivalent.
2505 If any insns must be emitted to compute NEWADDR, put them before INSN.
2506
2507 UNCRITICAL nonzero means accept paradoxical subregs.
2508 This is used for subregs found inside REG_NOTES. */
2509
2510 static rtx
2511 fixup_memory_subreg (x, insn, uncritical)
2512 rtx x;
2513 rtx insn;
2514 int uncritical;
2515 {
2516 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2517 rtx addr = XEXP (SUBREG_REG (x), 0);
2518 enum machine_mode mode = GET_MODE (x);
2519 rtx result;
2520
2521 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2522 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2523 && ! uncritical)
2524 abort ();
2525
2526 if (BYTES_BIG_ENDIAN)
2527 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2528 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2529 addr = plus_constant (addr, offset);
2530 if (!flag_force_addr && memory_address_p (mode, addr))
2531 /* Shortcut if no insns need be emitted. */
2532 return change_address (SUBREG_REG (x), mode, addr);
2533 start_sequence ();
2534 result = change_address (SUBREG_REG (x), mode, addr);
2535 emit_insn_before (gen_sequence (), insn);
2536 end_sequence ();
2537 return result;
2538 }
2539
2540 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2541 Replace subexpressions of X in place.
2542 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2543 Otherwise return X, with its contents possibly altered.
2544
2545 If any insns must be emitted to compute NEWADDR, put them before INSN.
2546
2547 UNCRITICAL is as in fixup_memory_subreg. */
2548
2549 static rtx
2550 walk_fixup_memory_subreg (x, insn, uncritical)
2551 register rtx x;
2552 rtx insn;
2553 int uncritical;
2554 {
2555 register enum rtx_code code;
2556 register const char *fmt;
2557 register int i;
2558
2559 if (x == 0)
2560 return 0;
2561
2562 code = GET_CODE (x);
2563
2564 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2565 return fixup_memory_subreg (x, insn, uncritical);
2566
2567 /* Nothing special about this RTX; fix its operands. */
2568
2569 fmt = GET_RTX_FORMAT (code);
2570 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2571 {
2572 if (fmt[i] == 'e')
2573 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2574 else if (fmt[i] == 'E')
2575 {
2576 register int j;
2577 for (j = 0; j < XVECLEN (x, i); j++)
2578 XVECEXP (x, i, j)
2579 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2580 }
2581 }
2582 return x;
2583 }
2584 \f
2585 /* For each memory ref within X, if it refers to a stack slot
2586 with an out of range displacement, put the address in a temp register
2587 (emitting new insns before INSN to load these registers)
2588 and alter the memory ref to use that register.
2589 Replace each such MEM rtx with a copy, to avoid clobberage. */
2590
2591 static rtx
2592 fixup_stack_1 (x, insn)
2593 rtx x;
2594 rtx insn;
2595 {
2596 register int i;
2597 register RTX_CODE code = GET_CODE (x);
2598 register const char *fmt;
2599
2600 if (code == MEM)
2601 {
2602 register rtx ad = XEXP (x, 0);
2603 /* If we have address of a stack slot but it's not valid
2604 (displacement is too large), compute the sum in a register. */
2605 if (GET_CODE (ad) == PLUS
2606 && GET_CODE (XEXP (ad, 0)) == REG
2607 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2608 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2609 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2610 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2611 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2612 #endif
2613 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2614 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2615 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2616 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2617 {
2618 rtx temp, seq;
2619 if (memory_address_p (GET_MODE (x), ad))
2620 return x;
2621
2622 start_sequence ();
2623 temp = copy_to_reg (ad);
2624 seq = gen_sequence ();
2625 end_sequence ();
2626 emit_insn_before (seq, insn);
2627 return change_address (x, VOIDmode, temp);
2628 }
2629 return x;
2630 }
2631
2632 fmt = GET_RTX_FORMAT (code);
2633 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2634 {
2635 if (fmt[i] == 'e')
2636 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2637 else if (fmt[i] == 'E')
2638 {
2639 register int j;
2640 for (j = 0; j < XVECLEN (x, i); j++)
2641 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2642 }
2643 }
2644 return x;
2645 }
2646 \f
2647 /* Optimization: a bit-field instruction whose field
2648 happens to be a byte or halfword in memory
2649 can be changed to a move instruction.
2650
2651 We call here when INSN is an insn to examine or store into a bit-field.
2652 BODY is the SET-rtx to be altered.
2653
2654 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2655 (Currently this is called only from function.c, and EQUIV_MEM
2656 is always 0.) */
2657
2658 static void
2659 optimize_bit_field (body, insn, equiv_mem)
2660 rtx body;
2661 rtx insn;
2662 rtx *equiv_mem;
2663 {
2664 register rtx bitfield;
2665 int destflag;
2666 rtx seq = 0;
2667 enum machine_mode mode;
2668
2669 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2670 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2671 bitfield = SET_DEST (body), destflag = 1;
2672 else
2673 bitfield = SET_SRC (body), destflag = 0;
2674
2675 /* First check that the field being stored has constant size and position
2676 and is in fact a byte or halfword suitably aligned. */
2677
2678 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2679 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2680 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2681 != BLKmode)
2682 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2683 {
2684 register rtx memref = 0;
2685
2686 /* Now check that the containing word is memory, not a register,
2687 and that it is safe to change the machine mode. */
2688
2689 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2690 memref = XEXP (bitfield, 0);
2691 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2692 && equiv_mem != 0)
2693 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2694 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2695 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2696 memref = SUBREG_REG (XEXP (bitfield, 0));
2697 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2698 && equiv_mem != 0
2699 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2700 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2701
2702 if (memref
2703 && ! mode_dependent_address_p (XEXP (memref, 0))
2704 && ! MEM_VOLATILE_P (memref))
2705 {
2706 /* Now adjust the address, first for any subreg'ing
2707 that we are now getting rid of,
2708 and then for which byte of the word is wanted. */
2709
2710 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2711 rtx insns;
2712
2713 /* Adjust OFFSET to count bits from low-address byte. */
2714 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2715 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2716 - offset - INTVAL (XEXP (bitfield, 1)));
2717
2718 /* Adjust OFFSET to count bytes from low-address byte. */
2719 offset /= BITS_PER_UNIT;
2720 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2721 {
2722 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2723 if (BYTES_BIG_ENDIAN)
2724 offset -= (MIN (UNITS_PER_WORD,
2725 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2726 - MIN (UNITS_PER_WORD,
2727 GET_MODE_SIZE (GET_MODE (memref))));
2728 }
2729
2730 start_sequence ();
2731 memref = change_address (memref, mode,
2732 plus_constant (XEXP (memref, 0), offset));
2733 insns = get_insns ();
2734 end_sequence ();
2735 emit_insns_before (insns, insn);
2736
2737 /* Store this memory reference where
2738 we found the bit field reference. */
2739
2740 if (destflag)
2741 {
2742 validate_change (insn, &SET_DEST (body), memref, 1);
2743 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2744 {
2745 rtx src = SET_SRC (body);
2746 while (GET_CODE (src) == SUBREG
2747 && SUBREG_WORD (src) == 0)
2748 src = SUBREG_REG (src);
2749 if (GET_MODE (src) != GET_MODE (memref))
2750 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2751 validate_change (insn, &SET_SRC (body), src, 1);
2752 }
2753 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2754 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2755 /* This shouldn't happen because anything that didn't have
2756 one of these modes should have got converted explicitly
2757 and then referenced through a subreg.
2758 This is so because the original bit-field was
2759 handled by agg_mode and so its tree structure had
2760 the same mode that memref now has. */
2761 abort ();
2762 }
2763 else
2764 {
2765 rtx dest = SET_DEST (body);
2766
2767 while (GET_CODE (dest) == SUBREG
2768 && SUBREG_WORD (dest) == 0
2769 && (GET_MODE_CLASS (GET_MODE (dest))
2770 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2771 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2772 <= UNITS_PER_WORD))
2773 dest = SUBREG_REG (dest);
2774
2775 validate_change (insn, &SET_DEST (body), dest, 1);
2776
2777 if (GET_MODE (dest) == GET_MODE (memref))
2778 validate_change (insn, &SET_SRC (body), memref, 1);
2779 else
2780 {
2781 /* Convert the mem ref to the destination mode. */
2782 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2783
2784 start_sequence ();
2785 convert_move (newreg, memref,
2786 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2787 seq = get_insns ();
2788 end_sequence ();
2789
2790 validate_change (insn, &SET_SRC (body), newreg, 1);
2791 }
2792 }
2793
2794 /* See if we can convert this extraction or insertion into
2795 a simple move insn. We might not be able to do so if this
2796 was, for example, part of a PARALLEL.
2797
2798 If we succeed, write out any needed conversions. If we fail,
2799 it is hard to guess why we failed, so don't do anything
2800 special; just let the optimization be suppressed. */
2801
2802 if (apply_change_group () && seq)
2803 emit_insns_before (seq, insn);
2804 }
2805 }
2806 }
2807 \f
2808 /* These routines are responsible for converting virtual register references
2809 to the actual hard register references once RTL generation is complete.
2810
2811 The following four variables are used for communication between the
2812 routines. They contain the offsets of the virtual registers from their
2813 respective hard registers. */
2814
2815 static int in_arg_offset;
2816 static int var_offset;
2817 static int dynamic_offset;
2818 static int out_arg_offset;
2819 static int cfa_offset;
2820
2821 /* In most machines, the stack pointer register is equivalent to the bottom
2822 of the stack. */
2823
2824 #ifndef STACK_POINTER_OFFSET
2825 #define STACK_POINTER_OFFSET 0
2826 #endif
2827
2828 /* If not defined, pick an appropriate default for the offset of dynamically
2829 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2830 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2831
2832 #ifndef STACK_DYNAMIC_OFFSET
2833
2834 /* The bottom of the stack points to the actual arguments. If
2835 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2836 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2837 stack space for register parameters is not pushed by the caller, but
2838 rather part of the fixed stack areas and hence not included in
2839 `current_function_outgoing_args_size'. Nevertheless, we must allow
2840 for it when allocating stack dynamic objects. */
2841
2842 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2843 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2844 ((ACCUMULATE_OUTGOING_ARGS \
2845 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2846 + (STACK_POINTER_OFFSET)) \
2847
2848 #else
2849 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2850 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2851 + (STACK_POINTER_OFFSET))
2852 #endif
2853 #endif
2854
2855 /* On most machines, the CFA coincides with the first incoming parm. */
2856
2857 #ifndef ARG_POINTER_CFA_OFFSET
2858 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2859 #endif
2860
2861 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2862 its address taken. DECL is the decl for the object stored in the
2863 register, for later use if we do need to force REG into the stack.
2864 REG is overwritten by the MEM like in put_reg_into_stack. */
2865
2866 rtx
2867 gen_mem_addressof (reg, decl)
2868 rtx reg;
2869 tree decl;
2870 {
2871 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2872 REGNO (reg), decl);
2873
2874 /* If the original REG was a user-variable, then so is the REG whose
2875 address is being taken. Likewise for unchanging. */
2876 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2877 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2878
2879 PUT_CODE (reg, MEM);
2880 XEXP (reg, 0) = r;
2881 if (decl)
2882 {
2883 tree type = TREE_TYPE (decl);
2884
2885 PUT_MODE (reg, DECL_MODE (decl));
2886 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2887 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2888 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2889
2890 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2891 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2892 }
2893 else
2894 {
2895 /* We have no alias information about this newly created MEM. */
2896 MEM_ALIAS_SET (reg) = 0;
2897
2898 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2899 }
2900
2901 return reg;
2902 }
2903
2904 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2905
2906 void
2907 flush_addressof (decl)
2908 tree decl;
2909 {
2910 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2911 && DECL_RTL (decl) != 0
2912 && GET_CODE (DECL_RTL (decl)) == MEM
2913 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2914 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2915 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2916 }
2917
2918 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2919
2920 static void
2921 put_addressof_into_stack (r, ht)
2922 rtx r;
2923 struct hash_table *ht;
2924 {
2925 tree decl, type;
2926 int volatile_p, used_p;
2927
2928 rtx reg = XEXP (r, 0);
2929
2930 if (GET_CODE (reg) != REG)
2931 abort ();
2932
2933 decl = ADDRESSOF_DECL (r);
2934 if (decl)
2935 {
2936 type = TREE_TYPE (decl);
2937 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2938 && TREE_THIS_VOLATILE (decl));
2939 used_p = (TREE_USED (decl)
2940 || (TREE_CODE (decl) != SAVE_EXPR
2941 && DECL_INITIAL (decl) != 0));
2942 }
2943 else
2944 {
2945 type = NULL_TREE;
2946 volatile_p = 0;
2947 used_p = 1;
2948 }
2949
2950 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2951 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2952 }
2953
2954 /* List of replacements made below in purge_addressof_1 when creating
2955 bitfield insertions. */
2956 static rtx purge_bitfield_addressof_replacements;
2957
2958 /* List of replacements made below in purge_addressof_1 for patterns
2959 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2960 corresponding (ADDRESSOF (REG ...)) and value is a substitution for
2961 the all pattern. List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2962 enough in complex cases, e.g. when some field values can be
2963 extracted by usage MEM with narrower mode. */
2964 static rtx purge_addressof_replacements;
2965
2966 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2967 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2968 the stack. If the function returns FALSE then the replacement could not
2969 be made. */
2970
2971 static boolean
2972 purge_addressof_1 (loc, insn, force, store, ht)
2973 rtx *loc;
2974 rtx insn;
2975 int force, store;
2976 struct hash_table *ht;
2977 {
2978 rtx x;
2979 RTX_CODE code;
2980 int i, j;
2981 const char *fmt;
2982 boolean result = true;
2983
2984 /* Re-start here to avoid recursion in common cases. */
2985 restart:
2986
2987 x = *loc;
2988 if (x == 0)
2989 return true;
2990
2991 code = GET_CODE (x);
2992
2993 /* If we don't return in any of the cases below, we will recurse inside
2994 the RTX, which will normally result in any ADDRESSOF being forced into
2995 memory. */
2996 if (code == SET)
2997 {
2998 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2999 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3000 return result;
3001 }
3002
3003 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
3004 {
3005 /* We must create a copy of the rtx because it was created by
3006 overwriting a REG rtx which is always shared. */
3007 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3008 rtx insns;
3009
3010 if (validate_change (insn, loc, sub, 0)
3011 || validate_replace_rtx (x, sub, insn))
3012 return true;
3013
3014 start_sequence ();
3015 sub = force_operand (sub, NULL_RTX);
3016 if (! validate_change (insn, loc, sub, 0)
3017 && ! validate_replace_rtx (x, sub, insn))
3018 abort ();
3019
3020 insns = gen_sequence ();
3021 end_sequence ();
3022 emit_insn_before (insns, insn);
3023 return true;
3024 }
3025
3026 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3027 {
3028 rtx sub = XEXP (XEXP (x, 0), 0);
3029 rtx sub2;
3030
3031 if (GET_CODE (sub) == MEM)
3032 {
3033 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3034 MEM_COPY_ATTRIBUTES (sub2, sub);
3035 sub = sub2;
3036 }
3037 else if (GET_CODE (sub) == REG
3038 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3039 ;
3040 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3041 {
3042 int size_x, size_sub;
3043
3044 if (!insn)
3045 {
3046 /* When processing REG_NOTES look at the list of
3047 replacements done on the insn to find the register that X
3048 was replaced by. */
3049 rtx tem;
3050
3051 for (tem = purge_bitfield_addressof_replacements;
3052 tem != NULL_RTX;
3053 tem = XEXP (XEXP (tem, 1), 1))
3054 if (rtx_equal_p (x, XEXP (tem, 0)))
3055 {
3056 *loc = XEXP (XEXP (tem, 1), 0);
3057 return true;
3058 }
3059
3060 /* See comment for purge_addressof_replacements. */
3061 for (tem = purge_addressof_replacements;
3062 tem != NULL_RTX;
3063 tem = XEXP (XEXP (tem, 1), 1))
3064 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3065 {
3066 rtx z = XEXP (XEXP (tem, 1), 0);
3067
3068 if (GET_MODE (x) == GET_MODE (z)
3069 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3070 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3071 abort ();
3072
3073 /* It can happen that the note may speak of things
3074 in a wider (or just different) mode than the
3075 code did. This is especially true of
3076 REG_RETVAL. */
3077
3078 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3079 z = SUBREG_REG (z);
3080
3081 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3082 && (GET_MODE_SIZE (GET_MODE (x))
3083 > GET_MODE_SIZE (GET_MODE (z))))
3084 {
3085 /* This can occur as a result in invalid
3086 pointer casts, e.g. float f; ...
3087 *(long long int *)&f.
3088 ??? We could emit a warning here, but
3089 without a line number that wouldn't be
3090 very helpful. */
3091 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3092 }
3093 else
3094 z = gen_lowpart (GET_MODE (x), z);
3095
3096 *loc = z;
3097 return true;
3098 }
3099
3100 /* Sometimes we may not be able to find the replacement. For
3101 example when the original insn was a MEM in a wider mode,
3102 and the note is part of a sign extension of a narrowed
3103 version of that MEM. Gcc testcase compile/990829-1.c can
3104 generate an example of this siutation. Rather than complain
3105 we return false, which will prompt our caller to remove the
3106 offending note. */
3107 return false;
3108 }
3109
3110 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3111 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3112
3113 /* Don't even consider working with paradoxical subregs,
3114 or the moral equivalent seen here. */
3115 if (size_x <= size_sub
3116 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3117 {
3118 /* Do a bitfield insertion to mirror what would happen
3119 in memory. */
3120
3121 rtx val, seq;
3122
3123 if (store)
3124 {
3125 rtx p = PREV_INSN (insn);
3126
3127 start_sequence ();
3128 val = gen_reg_rtx (GET_MODE (x));
3129 if (! validate_change (insn, loc, val, 0))
3130 {
3131 /* Discard the current sequence and put the
3132 ADDRESSOF on stack. */
3133 end_sequence ();
3134 goto give_up;
3135 }
3136 seq = gen_sequence ();
3137 end_sequence ();
3138 emit_insn_before (seq, insn);
3139 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3140 insn, ht);
3141
3142 start_sequence ();
3143 store_bit_field (sub, size_x, 0, GET_MODE (x),
3144 val, GET_MODE_SIZE (GET_MODE (sub)),
3145 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3146
3147 /* Make sure to unshare any shared rtl that store_bit_field
3148 might have created. */
3149 unshare_all_rtl_again (get_insns ());
3150
3151 seq = gen_sequence ();
3152 end_sequence ();
3153 p = emit_insn_after (seq, insn);
3154 if (NEXT_INSN (insn))
3155 compute_insns_for_mem (NEXT_INSN (insn),
3156 p ? NEXT_INSN (p) : NULL_RTX,
3157 ht);
3158 }
3159 else
3160 {
3161 rtx p = PREV_INSN (insn);
3162
3163 start_sequence ();
3164 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3165 GET_MODE (x), GET_MODE (x),
3166 GET_MODE_SIZE (GET_MODE (sub)),
3167 GET_MODE_SIZE (GET_MODE (sub)));
3168
3169 if (! validate_change (insn, loc, val, 0))
3170 {
3171 /* Discard the current sequence and put the
3172 ADDRESSOF on stack. */
3173 end_sequence ();
3174 goto give_up;
3175 }
3176
3177 seq = gen_sequence ();
3178 end_sequence ();
3179 emit_insn_before (seq, insn);
3180 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3181 insn, ht);
3182 }
3183
3184 /* Remember the replacement so that the same one can be done
3185 on the REG_NOTES. */
3186 purge_bitfield_addressof_replacements
3187 = gen_rtx_EXPR_LIST (VOIDmode, x,
3188 gen_rtx_EXPR_LIST
3189 (VOIDmode, val,
3190 purge_bitfield_addressof_replacements));
3191
3192 /* We replaced with a reg -- all done. */
3193 return true;
3194 }
3195 }
3196
3197 else if (validate_change (insn, loc, sub, 0))
3198 {
3199 /* Remember the replacement so that the same one can be done
3200 on the REG_NOTES. */
3201 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3202 {
3203 rtx tem;
3204
3205 for (tem = purge_addressof_replacements;
3206 tem != NULL_RTX;
3207 tem = XEXP (XEXP (tem, 1), 1))
3208 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3209 {
3210 XEXP (XEXP (tem, 1), 0) = sub;
3211 return true;
3212 }
3213 purge_addressof_replacements
3214 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3215 gen_rtx_EXPR_LIST (VOIDmode, sub,
3216 purge_addressof_replacements));
3217 return true;
3218 }
3219 goto restart;
3220 }
3221 give_up:;
3222 /* else give up and put it into the stack */
3223 }
3224
3225 else if (code == ADDRESSOF)
3226 {
3227 put_addressof_into_stack (x, ht);
3228 return true;
3229 }
3230 else if (code == SET)
3231 {
3232 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3233 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3234 return result;
3235 }
3236
3237 /* Scan all subexpressions. */
3238 fmt = GET_RTX_FORMAT (code);
3239 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3240 {
3241 if (*fmt == 'e')
3242 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3243 else if (*fmt == 'E')
3244 for (j = 0; j < XVECLEN (x, i); j++)
3245 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3246 }
3247
3248 return result;
3249 }
3250
3251 /* Return a new hash table entry in HT. */
3252
3253 static struct hash_entry *
3254 insns_for_mem_newfunc (he, ht, k)
3255 struct hash_entry *he;
3256 struct hash_table *ht;
3257 hash_table_key k ATTRIBUTE_UNUSED;
3258 {
3259 struct insns_for_mem_entry *ifmhe;
3260 if (he)
3261 return he;
3262
3263 ifmhe = ((struct insns_for_mem_entry *)
3264 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3265 ifmhe->insns = NULL_RTX;
3266
3267 return &ifmhe->he;
3268 }
3269
3270 /* Return a hash value for K, a REG. */
3271
3272 static unsigned long
3273 insns_for_mem_hash (k)
3274 hash_table_key k;
3275 {
3276 /* K is really a RTX. Just use the address as the hash value. */
3277 return (unsigned long) k;
3278 }
3279
3280 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3281
3282 static boolean
3283 insns_for_mem_comp (k1, k2)
3284 hash_table_key k1;
3285 hash_table_key k2;
3286 {
3287 return k1 == k2;
3288 }
3289
3290 struct insns_for_mem_walk_info {
3291 /* The hash table that we are using to record which INSNs use which
3292 MEMs. */
3293 struct hash_table *ht;
3294
3295 /* The INSN we are currently proessing. */
3296 rtx insn;
3297
3298 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3299 to find the insns that use the REGs in the ADDRESSOFs. */
3300 int pass;
3301 };
3302
3303 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3304 that might be used in an ADDRESSOF expression, record this INSN in
3305 the hash table given by DATA (which is really a pointer to an
3306 insns_for_mem_walk_info structure). */
3307
3308 static int
3309 insns_for_mem_walk (r, data)
3310 rtx *r;
3311 void *data;
3312 {
3313 struct insns_for_mem_walk_info *ifmwi
3314 = (struct insns_for_mem_walk_info *) data;
3315
3316 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3317 && GET_CODE (XEXP (*r, 0)) == REG)
3318 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3319 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3320 {
3321 /* Lookup this MEM in the hashtable, creating it if necessary. */
3322 struct insns_for_mem_entry *ifme
3323 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3324 *r,
3325 /*create=*/0,
3326 /*copy=*/0);
3327
3328 /* If we have not already recorded this INSN, do so now. Since
3329 we process the INSNs in order, we know that if we have
3330 recorded it it must be at the front of the list. */
3331 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3332 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3333 ifme->insns);
3334 }
3335
3336 return 0;
3337 }
3338
3339 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3340 which REGs in HT. */
3341
3342 static void
3343 compute_insns_for_mem (insns, last_insn, ht)
3344 rtx insns;
3345 rtx last_insn;
3346 struct hash_table *ht;
3347 {
3348 rtx insn;
3349 struct insns_for_mem_walk_info ifmwi;
3350 ifmwi.ht = ht;
3351
3352 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3353 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3354 if (INSN_P (insn))
3355 {
3356 ifmwi.insn = insn;
3357 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3358 }
3359 }
3360
3361 /* Helper function for purge_addressof called through for_each_rtx.
3362 Returns true iff the rtl is an ADDRESSOF. */
3363 static int
3364 is_addressof (rtl, data)
3365 rtx *rtl;
3366 void *data ATTRIBUTE_UNUSED;
3367 {
3368 return GET_CODE (*rtl) == ADDRESSOF;
3369 }
3370
3371 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3372 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3373 stack. */
3374
3375 void
3376 purge_addressof (insns)
3377 rtx insns;
3378 {
3379 rtx insn;
3380 struct hash_table ht;
3381
3382 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3383 requires a fixup pass over the instruction stream to correct
3384 INSNs that depended on the REG being a REG, and not a MEM. But,
3385 these fixup passes are slow. Furthermore, most MEMs are not
3386 mentioned in very many instructions. So, we speed up the process
3387 by pre-calculating which REGs occur in which INSNs; that allows
3388 us to perform the fixup passes much more quickly. */
3389 hash_table_init (&ht,
3390 insns_for_mem_newfunc,
3391 insns_for_mem_hash,
3392 insns_for_mem_comp);
3393 compute_insns_for_mem (insns, NULL_RTX, &ht);
3394
3395 for (insn = insns; insn; insn = NEXT_INSN (insn))
3396 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3397 || GET_CODE (insn) == CALL_INSN)
3398 {
3399 if (! purge_addressof_1 (&PATTERN (insn), insn,
3400 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3401 /* If we could not replace the ADDRESSOFs in the insn,
3402 something is wrong. */
3403 abort ();
3404
3405 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3406 {
3407 /* If we could not replace the ADDRESSOFs in the insn's notes,
3408 we can just remove the offending notes instead. */
3409 rtx note;
3410
3411 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3412 {
3413 /* If we find a REG_RETVAL note then the insn is a libcall.
3414 Such insns must have REG_EQUAL notes as well, in order
3415 for later passes of the compiler to work. So it is not
3416 safe to delete the notes here, and instead we abort. */
3417 if (REG_NOTE_KIND (note) == REG_RETVAL)
3418 abort ();
3419 if (for_each_rtx (&note, is_addressof, NULL))
3420 remove_note (insn, note);
3421 }
3422 }
3423 }
3424
3425 /* Clean up. */
3426 hash_table_free (&ht);
3427 purge_bitfield_addressof_replacements = 0;
3428 purge_addressof_replacements = 0;
3429
3430 /* REGs are shared. purge_addressof will destructively replace a REG
3431 with a MEM, which creates shared MEMs.
3432
3433 Unfortunately, the children of put_reg_into_stack assume that MEMs
3434 referring to the same stack slot are shared (fixup_var_refs and
3435 the associated hash table code).
3436
3437 So, we have to do another unsharing pass after we have flushed any
3438 REGs that had their address taken into the stack.
3439
3440 It may be worth tracking whether or not we converted any REGs into
3441 MEMs to avoid this overhead when it is not needed. */
3442 unshare_all_rtl_again (get_insns ());
3443 }
3444 \f
3445 /* Convert a SET of a hard subreg to a set of the appropriet hard
3446 register. A subroutine of purge_hard_subreg_sets. */
3447
3448 static void
3449 purge_single_hard_subreg_set (pattern)
3450 rtx pattern;
3451 {
3452 rtx reg = SET_DEST (pattern);
3453 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3454 int word = 0;
3455
3456 while (GET_CODE (reg) == SUBREG)
3457 {
3458 word += SUBREG_WORD (reg);
3459 reg = SUBREG_REG (reg);
3460 }
3461
3462 if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
3463 {
3464 reg = gen_rtx_REG (mode, REGNO (reg) + word);
3465 SET_DEST (pattern) = reg;
3466 }
3467 }
3468
3469 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3470 only such SETs that we expect to see are those left in because
3471 integrate can't handle sets of parts of a return value register.
3472
3473 We don't use alter_subreg because we only want to eliminate subregs
3474 of hard registers. */
3475
3476 void
3477 purge_hard_subreg_sets (insn)
3478 rtx insn;
3479 {
3480 for (; insn; insn = NEXT_INSN (insn))
3481 {
3482 if (INSN_P (insn))
3483 {
3484 rtx pattern = PATTERN (insn);
3485 switch (GET_CODE (pattern))
3486 {
3487 case SET:
3488 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3489 purge_single_hard_subreg_set (pattern);
3490 break;
3491 case PARALLEL:
3492 {
3493 int j;
3494 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3495 {
3496 rtx inner_pattern = XVECEXP (pattern, 0, j);
3497 if (GET_CODE (inner_pattern) == SET
3498 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3499 purge_single_hard_subreg_set (inner_pattern);
3500 }
3501 }
3502 break;
3503 default:
3504 break;
3505 }
3506 }
3507 }
3508 }
3509 \f
3510 /* Pass through the INSNS of function FNDECL and convert virtual register
3511 references to hard register references. */
3512
3513 void
3514 instantiate_virtual_regs (fndecl, insns)
3515 tree fndecl;
3516 rtx insns;
3517 {
3518 rtx insn;
3519 unsigned int i;
3520
3521 /* Compute the offsets to use for this function. */
3522 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3523 var_offset = STARTING_FRAME_OFFSET;
3524 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3525 out_arg_offset = STACK_POINTER_OFFSET;
3526 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3527
3528 /* Scan all variables and parameters of this function. For each that is
3529 in memory, instantiate all virtual registers if the result is a valid
3530 address. If not, we do it later. That will handle most uses of virtual
3531 regs on many machines. */
3532 instantiate_decls (fndecl, 1);
3533
3534 /* Initialize recognition, indicating that volatile is OK. */
3535 init_recog ();
3536
3537 /* Scan through all the insns, instantiating every virtual register still
3538 present. */
3539 for (insn = insns; insn; insn = NEXT_INSN (insn))
3540 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3541 || GET_CODE (insn) == CALL_INSN)
3542 {
3543 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3544 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3545 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
3546 if (GET_CODE (insn) == CALL_INSN)
3547 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3548 NULL_RTX, 0);
3549 }
3550
3551 /* Instantiate the stack slots for the parm registers, for later use in
3552 addressof elimination. */
3553 for (i = 0; i < max_parm_reg; ++i)
3554 if (parm_reg_stack_loc[i])
3555 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3556
3557 /* Now instantiate the remaining register equivalences for debugging info.
3558 These will not be valid addresses. */
3559 instantiate_decls (fndecl, 0);
3560
3561 /* Indicate that, from now on, assign_stack_local should use
3562 frame_pointer_rtx. */
3563 virtuals_instantiated = 1;
3564 }
3565
3566 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3567 all virtual registers in their DECL_RTL's.
3568
3569 If VALID_ONLY, do this only if the resulting address is still valid.
3570 Otherwise, always do it. */
3571
3572 static void
3573 instantiate_decls (fndecl, valid_only)
3574 tree fndecl;
3575 int valid_only;
3576 {
3577 tree decl;
3578
3579 /* Process all parameters of the function. */
3580 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3581 {
3582 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3583
3584 instantiate_decl (DECL_RTL (decl), size, valid_only);
3585
3586 /* If the parameter was promoted, then the incoming RTL mode may be
3587 larger than the declared type size. We must use the larger of
3588 the two sizes. */
3589 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3590 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3591 }
3592
3593 /* Now process all variables defined in the function or its subblocks. */
3594 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3595 }
3596
3597 /* Subroutine of instantiate_decls: Process all decls in the given
3598 BLOCK node and all its subblocks. */
3599
3600 static void
3601 instantiate_decls_1 (let, valid_only)
3602 tree let;
3603 int valid_only;
3604 {
3605 tree t;
3606
3607 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3608 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3609 valid_only);
3610
3611 /* Process all subblocks. */
3612 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3613 instantiate_decls_1 (t, valid_only);
3614 }
3615
3616 /* Subroutine of the preceding procedures: Given RTL representing a
3617 decl and the size of the object, do any instantiation required.
3618
3619 If VALID_ONLY is non-zero, it means that the RTL should only be
3620 changed if the new address is valid. */
3621
3622 static void
3623 instantiate_decl (x, size, valid_only)
3624 rtx x;
3625 HOST_WIDE_INT size;
3626 int valid_only;
3627 {
3628 enum machine_mode mode;
3629 rtx addr;
3630
3631 /* If this is not a MEM, no need to do anything. Similarly if the
3632 address is a constant or a register that is not a virtual register. */
3633
3634 if (x == 0 || GET_CODE (x) != MEM)
3635 return;
3636
3637 addr = XEXP (x, 0);
3638 if (CONSTANT_P (addr)
3639 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3640 || (GET_CODE (addr) == REG
3641 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3642 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3643 return;
3644
3645 /* If we should only do this if the address is valid, copy the address.
3646 We need to do this so we can undo any changes that might make the
3647 address invalid. This copy is unfortunate, but probably can't be
3648 avoided. */
3649
3650 if (valid_only)
3651 addr = copy_rtx (addr);
3652
3653 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3654
3655 if (valid_only && size >= 0)
3656 {
3657 unsigned HOST_WIDE_INT decl_size = size;
3658
3659 /* Now verify that the resulting address is valid for every integer or
3660 floating-point mode up to and including SIZE bytes long. We do this
3661 since the object might be accessed in any mode and frame addresses
3662 are shared. */
3663
3664 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3665 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3666 mode = GET_MODE_WIDER_MODE (mode))
3667 if (! memory_address_p (mode, addr))
3668 return;
3669
3670 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3671 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3672 mode = GET_MODE_WIDER_MODE (mode))
3673 if (! memory_address_p (mode, addr))
3674 return;
3675 }
3676
3677 /* Put back the address now that we have updated it and we either know
3678 it is valid or we don't care whether it is valid. */
3679
3680 XEXP (x, 0) = addr;
3681 }
3682 \f
3683 /* Given a pointer to a piece of rtx and an optional pointer to the
3684 containing object, instantiate any virtual registers present in it.
3685
3686 If EXTRA_INSNS, we always do the replacement and generate
3687 any extra insns before OBJECT. If it zero, we do nothing if replacement
3688 is not valid.
3689
3690 Return 1 if we either had nothing to do or if we were able to do the
3691 needed replacement. Return 0 otherwise; we only return zero if
3692 EXTRA_INSNS is zero.
3693
3694 We first try some simple transformations to avoid the creation of extra
3695 pseudos. */
3696
3697 static int
3698 instantiate_virtual_regs_1 (loc, object, extra_insns)
3699 rtx *loc;
3700 rtx object;
3701 int extra_insns;
3702 {
3703 rtx x;
3704 RTX_CODE code;
3705 rtx new = 0;
3706 HOST_WIDE_INT offset = 0;
3707 rtx temp;
3708 rtx seq;
3709 int i, j;
3710 const char *fmt;
3711
3712 /* Re-start here to avoid recursion in common cases. */
3713 restart:
3714
3715 x = *loc;
3716 if (x == 0)
3717 return 1;
3718
3719 code = GET_CODE (x);
3720
3721 /* Check for some special cases. */
3722 switch (code)
3723 {
3724 case CONST_INT:
3725 case CONST_DOUBLE:
3726 case CONST:
3727 case SYMBOL_REF:
3728 case CODE_LABEL:
3729 case PC:
3730 case CC0:
3731 case ASM_INPUT:
3732 case ADDR_VEC:
3733 case ADDR_DIFF_VEC:
3734 case RETURN:
3735 return 1;
3736
3737 case SET:
3738 /* We are allowed to set the virtual registers. This means that
3739 the actual register should receive the source minus the
3740 appropriate offset. This is used, for example, in the handling
3741 of non-local gotos. */
3742 if (SET_DEST (x) == virtual_incoming_args_rtx)
3743 new = arg_pointer_rtx, offset = -in_arg_offset;
3744 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3745 new = frame_pointer_rtx, offset = -var_offset;
3746 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3747 new = stack_pointer_rtx, offset = -dynamic_offset;
3748 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3749 new = stack_pointer_rtx, offset = -out_arg_offset;
3750 else if (SET_DEST (x) == virtual_cfa_rtx)
3751 new = arg_pointer_rtx, offset = -cfa_offset;
3752
3753 if (new)
3754 {
3755 rtx src = SET_SRC (x);
3756
3757 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3758
3759 /* The only valid sources here are PLUS or REG. Just do
3760 the simplest possible thing to handle them. */
3761 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3762 abort ();
3763
3764 start_sequence ();
3765 if (GET_CODE (src) != REG)
3766 temp = force_operand (src, NULL_RTX);
3767 else
3768 temp = src;
3769 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3770 seq = get_insns ();
3771 end_sequence ();
3772
3773 emit_insns_before (seq, object);
3774 SET_DEST (x) = new;
3775
3776 if (! validate_change (object, &SET_SRC (x), temp, 0)
3777 || ! extra_insns)
3778 abort ();
3779
3780 return 1;
3781 }
3782
3783 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3784 loc = &SET_SRC (x);
3785 goto restart;
3786
3787 case PLUS:
3788 /* Handle special case of virtual register plus constant. */
3789 if (CONSTANT_P (XEXP (x, 1)))
3790 {
3791 rtx old, new_offset;
3792
3793 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3794 if (GET_CODE (XEXP (x, 0)) == PLUS)
3795 {
3796 rtx inner = XEXP (XEXP (x, 0), 0);
3797
3798 if (inner == virtual_incoming_args_rtx)
3799 new = arg_pointer_rtx, offset = in_arg_offset;
3800 else if (inner == virtual_stack_vars_rtx)
3801 new = frame_pointer_rtx, offset = var_offset;
3802 else if (inner == virtual_stack_dynamic_rtx)
3803 new = stack_pointer_rtx, offset = dynamic_offset;
3804 else if (inner == virtual_outgoing_args_rtx)
3805 new = stack_pointer_rtx, offset = out_arg_offset;
3806 else if (inner == virtual_cfa_rtx)
3807 new = arg_pointer_rtx, offset = cfa_offset;
3808 else
3809 {
3810 loc = &XEXP (x, 0);
3811 goto restart;
3812 }
3813
3814 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3815 extra_insns);
3816 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3817 }
3818
3819 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3820 new = arg_pointer_rtx, offset = in_arg_offset;
3821 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3822 new = frame_pointer_rtx, offset = var_offset;
3823 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3824 new = stack_pointer_rtx, offset = dynamic_offset;
3825 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3826 new = stack_pointer_rtx, offset = out_arg_offset;
3827 else if (XEXP (x, 0) == virtual_cfa_rtx)
3828 new = arg_pointer_rtx, offset = cfa_offset;
3829 else
3830 {
3831 /* We know the second operand is a constant. Unless the
3832 first operand is a REG (which has been already checked),
3833 it needs to be checked. */
3834 if (GET_CODE (XEXP (x, 0)) != REG)
3835 {
3836 loc = &XEXP (x, 0);
3837 goto restart;
3838 }
3839 return 1;
3840 }
3841
3842 new_offset = plus_constant (XEXP (x, 1), offset);
3843
3844 /* If the new constant is zero, try to replace the sum with just
3845 the register. */
3846 if (new_offset == const0_rtx
3847 && validate_change (object, loc, new, 0))
3848 return 1;
3849
3850 /* Next try to replace the register and new offset.
3851 There are two changes to validate here and we can't assume that
3852 in the case of old offset equals new just changing the register
3853 will yield a valid insn. In the interests of a little efficiency,
3854 however, we only call validate change once (we don't queue up the
3855 changes and then call apply_change_group). */
3856
3857 old = XEXP (x, 0);
3858 if (offset == 0
3859 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3860 : (XEXP (x, 0) = new,
3861 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3862 {
3863 if (! extra_insns)
3864 {
3865 XEXP (x, 0) = old;
3866 return 0;
3867 }
3868
3869 /* Otherwise copy the new constant into a register and replace
3870 constant with that register. */
3871 temp = gen_reg_rtx (Pmode);
3872 XEXP (x, 0) = new;
3873 if (validate_change (object, &XEXP (x, 1), temp, 0))
3874 emit_insn_before (gen_move_insn (temp, new_offset), object);
3875 else
3876 {
3877 /* If that didn't work, replace this expression with a
3878 register containing the sum. */
3879
3880 XEXP (x, 0) = old;
3881 new = gen_rtx_PLUS (Pmode, new, new_offset);
3882
3883 start_sequence ();
3884 temp = force_operand (new, NULL_RTX);
3885 seq = get_insns ();
3886 end_sequence ();
3887
3888 emit_insns_before (seq, object);
3889 if (! validate_change (object, loc, temp, 0)
3890 && ! validate_replace_rtx (x, temp, object))
3891 abort ();
3892 }
3893 }
3894
3895 return 1;
3896 }
3897
3898 /* Fall through to generic two-operand expression case. */
3899 case EXPR_LIST:
3900 case CALL:
3901 case COMPARE:
3902 case MINUS:
3903 case MULT:
3904 case DIV: case UDIV:
3905 case MOD: case UMOD:
3906 case AND: case IOR: case XOR:
3907 case ROTATERT: case ROTATE:
3908 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3909 case NE: case EQ:
3910 case GE: case GT: case GEU: case GTU:
3911 case LE: case LT: case LEU: case LTU:
3912 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3913 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3914 loc = &XEXP (x, 0);
3915 goto restart;
3916
3917 case MEM:
3918 /* Most cases of MEM that convert to valid addresses have already been
3919 handled by our scan of decls. The only special handling we
3920 need here is to make a copy of the rtx to ensure it isn't being
3921 shared if we have to change it to a pseudo.
3922
3923 If the rtx is a simple reference to an address via a virtual register,
3924 it can potentially be shared. In such cases, first try to make it
3925 a valid address, which can also be shared. Otherwise, copy it and
3926 proceed normally.
3927
3928 First check for common cases that need no processing. These are
3929 usually due to instantiation already being done on a previous instance
3930 of a shared rtx. */
3931
3932 temp = XEXP (x, 0);
3933 if (CONSTANT_ADDRESS_P (temp)
3934 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3935 || temp == arg_pointer_rtx
3936 #endif
3937 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3938 || temp == hard_frame_pointer_rtx
3939 #endif
3940 || temp == frame_pointer_rtx)
3941 return 1;
3942
3943 if (GET_CODE (temp) == PLUS
3944 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3945 && (XEXP (temp, 0) == frame_pointer_rtx
3946 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3947 || XEXP (temp, 0) == hard_frame_pointer_rtx
3948 #endif
3949 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3950 || XEXP (temp, 0) == arg_pointer_rtx
3951 #endif
3952 ))
3953 return 1;
3954
3955 if (temp == virtual_stack_vars_rtx
3956 || temp == virtual_incoming_args_rtx
3957 || (GET_CODE (temp) == PLUS
3958 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3959 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3960 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3961 {
3962 /* This MEM may be shared. If the substitution can be done without
3963 the need to generate new pseudos, we want to do it in place
3964 so all copies of the shared rtx benefit. The call below will
3965 only make substitutions if the resulting address is still
3966 valid.
3967
3968 Note that we cannot pass X as the object in the recursive call
3969 since the insn being processed may not allow all valid
3970 addresses. However, if we were not passed on object, we can
3971 only modify X without copying it if X will have a valid
3972 address.
3973
3974 ??? Also note that this can still lose if OBJECT is an insn that
3975 has less restrictions on an address that some other insn.
3976 In that case, we will modify the shared address. This case
3977 doesn't seem very likely, though. One case where this could
3978 happen is in the case of a USE or CLOBBER reference, but we
3979 take care of that below. */
3980
3981 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3982 object ? object : x, 0))
3983 return 1;
3984
3985 /* Otherwise make a copy and process that copy. We copy the entire
3986 RTL expression since it might be a PLUS which could also be
3987 shared. */
3988 *loc = x = copy_rtx (x);
3989 }
3990
3991 /* Fall through to generic unary operation case. */
3992 case SUBREG:
3993 case STRICT_LOW_PART:
3994 case NEG: case NOT:
3995 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3996 case SIGN_EXTEND: case ZERO_EXTEND:
3997 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3998 case FLOAT: case FIX:
3999 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4000 case ABS:
4001 case SQRT:
4002 case FFS:
4003 /* These case either have just one operand or we know that we need not
4004 check the rest of the operands. */
4005 loc = &XEXP (x, 0);
4006 goto restart;
4007
4008 case USE:
4009 case CLOBBER:
4010 /* If the operand is a MEM, see if the change is a valid MEM. If not,
4011 go ahead and make the invalid one, but do it to a copy. For a REG,
4012 just make the recursive call, since there's no chance of a problem. */
4013
4014 if ((GET_CODE (XEXP (x, 0)) == MEM
4015 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4016 0))
4017 || (GET_CODE (XEXP (x, 0)) == REG
4018 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4019 return 1;
4020
4021 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4022 loc = &XEXP (x, 0);
4023 goto restart;
4024
4025 case REG:
4026 /* Try to replace with a PLUS. If that doesn't work, compute the sum
4027 in front of this insn and substitute the temporary. */
4028 if (x == virtual_incoming_args_rtx)
4029 new = arg_pointer_rtx, offset = in_arg_offset;
4030 else if (x == virtual_stack_vars_rtx)
4031 new = frame_pointer_rtx, offset = var_offset;
4032 else if (x == virtual_stack_dynamic_rtx)
4033 new = stack_pointer_rtx, offset = dynamic_offset;
4034 else if (x == virtual_outgoing_args_rtx)
4035 new = stack_pointer_rtx, offset = out_arg_offset;
4036 else if (x == virtual_cfa_rtx)
4037 new = arg_pointer_rtx, offset = cfa_offset;
4038
4039 if (new)
4040 {
4041 temp = plus_constant (new, offset);
4042 if (!validate_change (object, loc, temp, 0))
4043 {
4044 if (! extra_insns)
4045 return 0;
4046
4047 start_sequence ();
4048 temp = force_operand (temp, NULL_RTX);
4049 seq = get_insns ();
4050 end_sequence ();
4051
4052 emit_insns_before (seq, object);
4053 if (! validate_change (object, loc, temp, 0)
4054 && ! validate_replace_rtx (x, temp, object))
4055 abort ();
4056 }
4057 }
4058
4059 return 1;
4060
4061 case ADDRESSOF:
4062 if (GET_CODE (XEXP (x, 0)) == REG)
4063 return 1;
4064
4065 else if (GET_CODE (XEXP (x, 0)) == MEM)
4066 {
4067 /* If we have a (addressof (mem ..)), do any instantiation inside
4068 since we know we'll be making the inside valid when we finally
4069 remove the ADDRESSOF. */
4070 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4071 return 1;
4072 }
4073 break;
4074
4075 default:
4076 break;
4077 }
4078
4079 /* Scan all subexpressions. */
4080 fmt = GET_RTX_FORMAT (code);
4081 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4082 if (*fmt == 'e')
4083 {
4084 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4085 return 0;
4086 }
4087 else if (*fmt == 'E')
4088 for (j = 0; j < XVECLEN (x, i); j++)
4089 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4090 extra_insns))
4091 return 0;
4092
4093 return 1;
4094 }
4095 \f
4096 /* Optimization: assuming this function does not receive nonlocal gotos,
4097 delete the handlers for such, as well as the insns to establish
4098 and disestablish them. */
4099
4100 static void
4101 delete_handlers ()
4102 {
4103 rtx insn;
4104 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4105 {
4106 /* Delete the handler by turning off the flag that would
4107 prevent jump_optimize from deleting it.
4108 Also permit deletion of the nonlocal labels themselves
4109 if nothing local refers to them. */
4110 if (GET_CODE (insn) == CODE_LABEL)
4111 {
4112 tree t, last_t;
4113
4114 LABEL_PRESERVE_P (insn) = 0;
4115
4116 /* Remove it from the nonlocal_label list, to avoid confusing
4117 flow. */
4118 for (t = nonlocal_labels, last_t = 0; t;
4119 last_t = t, t = TREE_CHAIN (t))
4120 if (DECL_RTL (TREE_VALUE (t)) == insn)
4121 break;
4122 if (t)
4123 {
4124 if (! last_t)
4125 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4126 else
4127 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4128 }
4129 }
4130 if (GET_CODE (insn) == INSN)
4131 {
4132 int can_delete = 0;
4133 rtx t;
4134 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4135 if (reg_mentioned_p (t, PATTERN (insn)))
4136 {
4137 can_delete = 1;
4138 break;
4139 }
4140 if (can_delete
4141 || (nonlocal_goto_stack_level != 0
4142 && reg_mentioned_p (nonlocal_goto_stack_level,
4143 PATTERN (insn))))
4144 delete_insn (insn);
4145 }
4146 }
4147 }
4148 \f
4149 int
4150 max_parm_reg_num ()
4151 {
4152 return max_parm_reg;
4153 }
4154
4155 /* Return the first insn following those generated by `assign_parms'. */
4156
4157 rtx
4158 get_first_nonparm_insn ()
4159 {
4160 if (last_parm_insn)
4161 return NEXT_INSN (last_parm_insn);
4162 return get_insns ();
4163 }
4164
4165 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4166 Crash if there is none. */
4167
4168 rtx
4169 get_first_block_beg ()
4170 {
4171 register rtx searcher;
4172 register rtx insn = get_first_nonparm_insn ();
4173
4174 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4175 if (GET_CODE (searcher) == NOTE
4176 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4177 return searcher;
4178
4179 abort (); /* Invalid call to this function. (See comments above.) */
4180 return NULL_RTX;
4181 }
4182
4183 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4184 This means a type for which function calls must pass an address to the
4185 function or get an address back from the function.
4186 EXP may be a type node or an expression (whose type is tested). */
4187
4188 int
4189 aggregate_value_p (exp)
4190 tree exp;
4191 {
4192 int i, regno, nregs;
4193 rtx reg;
4194
4195 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4196
4197 if (TREE_CODE (type) == VOID_TYPE)
4198 return 0;
4199 if (RETURN_IN_MEMORY (type))
4200 return 1;
4201 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4202 and thus can't be returned in registers. */
4203 if (TREE_ADDRESSABLE (type))
4204 return 1;
4205 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4206 return 1;
4207 /* Make sure we have suitable call-clobbered regs to return
4208 the value in; if not, we must return it in memory. */
4209 reg = hard_function_value (type, 0, 0);
4210
4211 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4212 it is OK. */
4213 if (GET_CODE (reg) != REG)
4214 return 0;
4215
4216 regno = REGNO (reg);
4217 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4218 for (i = 0; i < nregs; i++)
4219 if (! call_used_regs[regno + i])
4220 return 1;
4221 return 0;
4222 }
4223 \f
4224 /* Assign RTL expressions to the function's parameters.
4225 This may involve copying them into registers and using
4226 those registers as the RTL for them. */
4227
4228 void
4229 assign_parms (fndecl)
4230 tree fndecl;
4231 {
4232 register tree parm;
4233 register rtx entry_parm = 0;
4234 register rtx stack_parm = 0;
4235 CUMULATIVE_ARGS args_so_far;
4236 enum machine_mode promoted_mode, passed_mode;
4237 enum machine_mode nominal_mode, promoted_nominal_mode;
4238 int unsignedp;
4239 /* Total space needed so far for args on the stack,
4240 given as a constant and a tree-expression. */
4241 struct args_size stack_args_size;
4242 tree fntype = TREE_TYPE (fndecl);
4243 tree fnargs = DECL_ARGUMENTS (fndecl);
4244 /* This is used for the arg pointer when referring to stack args. */
4245 rtx internal_arg_pointer;
4246 /* This is a dummy PARM_DECL that we used for the function result if
4247 the function returns a structure. */
4248 tree function_result_decl = 0;
4249 #ifdef SETUP_INCOMING_VARARGS
4250 int varargs_setup = 0;
4251 #endif
4252 rtx conversion_insns = 0;
4253 struct args_size alignment_pad;
4254
4255 /* Nonzero if the last arg is named `__builtin_va_alist',
4256 which is used on some machines for old-fashioned non-ANSI varargs.h;
4257 this should be stuck onto the stack as if it had arrived there. */
4258 int hide_last_arg
4259 = (current_function_varargs
4260 && fnargs
4261 && (parm = tree_last (fnargs)) != 0
4262 && DECL_NAME (parm)
4263 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4264 "__builtin_va_alist")));
4265
4266 /* Nonzero if function takes extra anonymous args.
4267 This means the last named arg must be on the stack
4268 right before the anonymous ones. */
4269 int stdarg
4270 = (TYPE_ARG_TYPES (fntype) != 0
4271 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4272 != void_type_node));
4273
4274 current_function_stdarg = stdarg;
4275
4276 /* If the reg that the virtual arg pointer will be translated into is
4277 not a fixed reg or is the stack pointer, make a copy of the virtual
4278 arg pointer, and address parms via the copy. The frame pointer is
4279 considered fixed even though it is not marked as such.
4280
4281 The second time through, simply use ap to avoid generating rtx. */
4282
4283 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4284 || ! (fixed_regs[ARG_POINTER_REGNUM]
4285 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4286 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4287 else
4288 internal_arg_pointer = virtual_incoming_args_rtx;
4289 current_function_internal_arg_pointer = internal_arg_pointer;
4290
4291 stack_args_size.constant = 0;
4292 stack_args_size.var = 0;
4293
4294 /* If struct value address is treated as the first argument, make it so. */
4295 if (aggregate_value_p (DECL_RESULT (fndecl))
4296 && ! current_function_returns_pcc_struct
4297 && struct_value_incoming_rtx == 0)
4298 {
4299 tree type = build_pointer_type (TREE_TYPE (fntype));
4300
4301 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4302
4303 DECL_ARG_TYPE (function_result_decl) = type;
4304 TREE_CHAIN (function_result_decl) = fnargs;
4305 fnargs = function_result_decl;
4306 }
4307
4308 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4309 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4310
4311 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4312 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4313 #else
4314 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4315 #endif
4316
4317 /* We haven't yet found an argument that we must push and pretend the
4318 caller did. */
4319 current_function_pretend_args_size = 0;
4320
4321 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4322 {
4323 struct args_size stack_offset;
4324 struct args_size arg_size;
4325 int passed_pointer = 0;
4326 int did_conversion = 0;
4327 tree passed_type = DECL_ARG_TYPE (parm);
4328 tree nominal_type = TREE_TYPE (parm);
4329 int pretend_named;
4330
4331 /* Set LAST_NAMED if this is last named arg before some
4332 anonymous args. */
4333 int last_named = ((TREE_CHAIN (parm) == 0
4334 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4335 && (stdarg || current_function_varargs));
4336 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4337 most machines, if this is a varargs/stdarg function, then we treat
4338 the last named arg as if it were anonymous too. */
4339 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4340
4341 if (TREE_TYPE (parm) == error_mark_node
4342 /* This can happen after weird syntax errors
4343 or if an enum type is defined among the parms. */
4344 || TREE_CODE (parm) != PARM_DECL
4345 || passed_type == NULL)
4346 {
4347 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4348 = gen_rtx_MEM (BLKmode, const0_rtx);
4349 TREE_USED (parm) = 1;
4350 continue;
4351 }
4352
4353 /* For varargs.h function, save info about regs and stack space
4354 used by the individual args, not including the va_alist arg. */
4355 if (hide_last_arg && last_named)
4356 current_function_args_info = args_so_far;
4357
4358 /* Find mode of arg as it is passed, and mode of arg
4359 as it should be during execution of this function. */
4360 passed_mode = TYPE_MODE (passed_type);
4361 nominal_mode = TYPE_MODE (nominal_type);
4362
4363 /* If the parm's mode is VOID, its value doesn't matter,
4364 and avoid the usual things like emit_move_insn that could crash. */
4365 if (nominal_mode == VOIDmode)
4366 {
4367 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4368 continue;
4369 }
4370
4371 /* If the parm is to be passed as a transparent union, use the
4372 type of the first field for the tests below. We have already
4373 verified that the modes are the same. */
4374 if (DECL_TRANSPARENT_UNION (parm)
4375 || (TREE_CODE (passed_type) == UNION_TYPE
4376 && TYPE_TRANSPARENT_UNION (passed_type)))
4377 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4378
4379 /* See if this arg was passed by invisible reference. It is if
4380 it is an object whose size depends on the contents of the
4381 object itself or if the machine requires these objects be passed
4382 that way. */
4383
4384 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4385 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4386 || TREE_ADDRESSABLE (passed_type)
4387 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4388 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4389 passed_type, named_arg)
4390 #endif
4391 )
4392 {
4393 passed_type = nominal_type = build_pointer_type (passed_type);
4394 passed_pointer = 1;
4395 passed_mode = nominal_mode = Pmode;
4396 }
4397
4398 promoted_mode = passed_mode;
4399
4400 #ifdef PROMOTE_FUNCTION_ARGS
4401 /* Compute the mode in which the arg is actually extended to. */
4402 unsignedp = TREE_UNSIGNED (passed_type);
4403 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4404 #endif
4405
4406 /* Let machine desc say which reg (if any) the parm arrives in.
4407 0 means it arrives on the stack. */
4408 #ifdef FUNCTION_INCOMING_ARG
4409 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4410 passed_type, named_arg);
4411 #else
4412 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4413 passed_type, named_arg);
4414 #endif
4415
4416 if (entry_parm == 0)
4417 promoted_mode = passed_mode;
4418
4419 #ifdef SETUP_INCOMING_VARARGS
4420 /* If this is the last named parameter, do any required setup for
4421 varargs or stdargs. We need to know about the case of this being an
4422 addressable type, in which case we skip the registers it
4423 would have arrived in.
4424
4425 For stdargs, LAST_NAMED will be set for two parameters, the one that
4426 is actually the last named, and the dummy parameter. We only
4427 want to do this action once.
4428
4429 Also, indicate when RTL generation is to be suppressed. */
4430 if (last_named && !varargs_setup)
4431 {
4432 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4433 current_function_pretend_args_size, 0);
4434 varargs_setup = 1;
4435 }
4436 #endif
4437
4438 /* Determine parm's home in the stack,
4439 in case it arrives in the stack or we should pretend it did.
4440
4441 Compute the stack position and rtx where the argument arrives
4442 and its size.
4443
4444 There is one complexity here: If this was a parameter that would
4445 have been passed in registers, but wasn't only because it is
4446 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4447 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4448 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4449 0 as it was the previous time. */
4450
4451 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4452 locate_and_pad_parm (promoted_mode, passed_type,
4453 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4454 1,
4455 #else
4456 #ifdef FUNCTION_INCOMING_ARG
4457 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4458 passed_type,
4459 pretend_named) != 0,
4460 #else
4461 FUNCTION_ARG (args_so_far, promoted_mode,
4462 passed_type,
4463 pretend_named) != 0,
4464 #endif
4465 #endif
4466 fndecl, &stack_args_size, &stack_offset, &arg_size,
4467 &alignment_pad);
4468
4469 {
4470 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4471
4472 if (offset_rtx == const0_rtx)
4473 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4474 else
4475 stack_parm = gen_rtx_MEM (promoted_mode,
4476 gen_rtx_PLUS (Pmode,
4477 internal_arg_pointer,
4478 offset_rtx));
4479
4480 set_mem_attributes (stack_parm, parm, 1);
4481 }
4482
4483 /* If this parameter was passed both in registers and in the stack,
4484 use the copy on the stack. */
4485 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4486 entry_parm = 0;
4487
4488 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4489 /* If this parm was passed part in regs and part in memory,
4490 pretend it arrived entirely in memory
4491 by pushing the register-part onto the stack.
4492
4493 In the special case of a DImode or DFmode that is split,
4494 we could put it together in a pseudoreg directly,
4495 but for now that's not worth bothering with. */
4496
4497 if (entry_parm)
4498 {
4499 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4500 passed_type, named_arg);
4501
4502 if (nregs > 0)
4503 {
4504 current_function_pretend_args_size
4505 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4506 / (PARM_BOUNDARY / BITS_PER_UNIT)
4507 * (PARM_BOUNDARY / BITS_PER_UNIT));
4508
4509 /* Handle calls that pass values in multiple non-contiguous
4510 locations. The Irix 6 ABI has examples of this. */
4511 if (GET_CODE (entry_parm) == PARALLEL)
4512 emit_group_store (validize_mem (stack_parm), entry_parm,
4513 int_size_in_bytes (TREE_TYPE (parm)),
4514 TYPE_ALIGN (TREE_TYPE (parm)));
4515
4516 else
4517 move_block_from_reg (REGNO (entry_parm),
4518 validize_mem (stack_parm), nregs,
4519 int_size_in_bytes (TREE_TYPE (parm)));
4520
4521 entry_parm = stack_parm;
4522 }
4523 }
4524 #endif
4525
4526 /* If we didn't decide this parm came in a register,
4527 by default it came on the stack. */
4528 if (entry_parm == 0)
4529 entry_parm = stack_parm;
4530
4531 /* Record permanently how this parm was passed. */
4532 DECL_INCOMING_RTL (parm) = entry_parm;
4533
4534 /* If there is actually space on the stack for this parm,
4535 count it in stack_args_size; otherwise set stack_parm to 0
4536 to indicate there is no preallocated stack slot for the parm. */
4537
4538 if (entry_parm == stack_parm
4539 || (GET_CODE (entry_parm) == PARALLEL
4540 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4541 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4542 /* On some machines, even if a parm value arrives in a register
4543 there is still an (uninitialized) stack slot allocated for it.
4544
4545 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4546 whether this parameter already has a stack slot allocated,
4547 because an arg block exists only if current_function_args_size
4548 is larger than some threshold, and we haven't calculated that
4549 yet. So, for now, we just assume that stack slots never exist
4550 in this case. */
4551 || REG_PARM_STACK_SPACE (fndecl) > 0
4552 #endif
4553 )
4554 {
4555 stack_args_size.constant += arg_size.constant;
4556 if (arg_size.var)
4557 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4558 }
4559 else
4560 /* No stack slot was pushed for this parm. */
4561 stack_parm = 0;
4562
4563 /* Update info on where next arg arrives in registers. */
4564
4565 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4566 passed_type, named_arg);
4567
4568 /* If we can't trust the parm stack slot to be aligned enough
4569 for its ultimate type, don't use that slot after entry.
4570 We'll make another stack slot, if we need one. */
4571 {
4572 unsigned int thisparm_boundary
4573 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4574
4575 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4576 stack_parm = 0;
4577 }
4578
4579 /* If parm was passed in memory, and we need to convert it on entry,
4580 don't store it back in that same slot. */
4581 if (entry_parm != 0
4582 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4583 stack_parm = 0;
4584
4585 /* When an argument is passed in multiple locations, we can't
4586 make use of this information, but we can save some copying if
4587 the whole argument is passed in a single register. */
4588 if (GET_CODE (entry_parm) == PARALLEL
4589 && nominal_mode != BLKmode && passed_mode != BLKmode)
4590 {
4591 int i, len = XVECLEN (entry_parm, 0);
4592
4593 for (i = 0; i < len; i++)
4594 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4595 && GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4596 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4597 == passed_mode)
4598 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4599 {
4600 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4601 DECL_INCOMING_RTL (parm) = entry_parm;
4602 break;
4603 }
4604 }
4605
4606 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4607 in the mode in which it arrives.
4608 STACK_PARM is an RTX for a stack slot where the parameter can live
4609 during the function (in case we want to put it there).
4610 STACK_PARM is 0 if no stack slot was pushed for it.
4611
4612 Now output code if necessary to convert ENTRY_PARM to
4613 the type in which this function declares it,
4614 and store that result in an appropriate place,
4615 which may be a pseudo reg, may be STACK_PARM,
4616 or may be a local stack slot if STACK_PARM is 0.
4617
4618 Set DECL_RTL to that place. */
4619
4620 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4621 {
4622 /* If a BLKmode arrives in registers, copy it to a stack slot.
4623 Handle calls that pass values in multiple non-contiguous
4624 locations. The Irix 6 ABI has examples of this. */
4625 if (GET_CODE (entry_parm) == REG
4626 || GET_CODE (entry_parm) == PARALLEL)
4627 {
4628 int size_stored
4629 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4630 UNITS_PER_WORD);
4631
4632 /* Note that we will be storing an integral number of words.
4633 So we have to be careful to ensure that we allocate an
4634 integral number of words. We do this below in the
4635 assign_stack_local if space was not allocated in the argument
4636 list. If it was, this will not work if PARM_BOUNDARY is not
4637 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4638 if it becomes a problem. */
4639
4640 if (stack_parm == 0)
4641 {
4642 stack_parm
4643 = assign_stack_local (GET_MODE (entry_parm),
4644 size_stored, 0);
4645 set_mem_attributes (stack_parm, parm, 1);
4646 }
4647
4648 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4649 abort ();
4650
4651 /* Handle calls that pass values in multiple non-contiguous
4652 locations. The Irix 6 ABI has examples of this. */
4653 if (GET_CODE (entry_parm) == PARALLEL)
4654 emit_group_store (validize_mem (stack_parm), entry_parm,
4655 int_size_in_bytes (TREE_TYPE (parm)),
4656 TYPE_ALIGN (TREE_TYPE (parm)));
4657 else
4658 move_block_from_reg (REGNO (entry_parm),
4659 validize_mem (stack_parm),
4660 size_stored / UNITS_PER_WORD,
4661 int_size_in_bytes (TREE_TYPE (parm)));
4662 }
4663 DECL_RTL (parm) = stack_parm;
4664 }
4665 else if (! ((! optimize
4666 && ! DECL_REGISTER (parm)
4667 && ! DECL_INLINE (fndecl))
4668 || TREE_SIDE_EFFECTS (parm)
4669 /* If -ffloat-store specified, don't put explicit
4670 float variables into registers. */
4671 || (flag_float_store
4672 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4673 /* Always assign pseudo to structure return or item passed
4674 by invisible reference. */
4675 || passed_pointer || parm == function_result_decl)
4676 {
4677 /* Store the parm in a pseudoregister during the function, but we
4678 may need to do it in a wider mode. */
4679
4680 register rtx parmreg;
4681 unsigned int regno, regnoi = 0, regnor = 0;
4682
4683 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4684
4685 promoted_nominal_mode
4686 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4687
4688 parmreg = gen_reg_rtx (promoted_nominal_mode);
4689 mark_user_reg (parmreg);
4690
4691 /* If this was an item that we received a pointer to, set DECL_RTL
4692 appropriately. */
4693 if (passed_pointer)
4694 {
4695 DECL_RTL (parm)
4696 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4697 set_mem_attributes (DECL_RTL (parm), parm, 1);
4698 }
4699 else
4700 DECL_RTL (parm) = parmreg;
4701
4702 /* Copy the value into the register. */
4703 if (nominal_mode != passed_mode
4704 || promoted_nominal_mode != promoted_mode)
4705 {
4706 int save_tree_used;
4707 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4708 mode, by the caller. We now have to convert it to
4709 NOMINAL_MODE, if different. However, PARMREG may be in
4710 a different mode than NOMINAL_MODE if it is being stored
4711 promoted.
4712
4713 If ENTRY_PARM is a hard register, it might be in a register
4714 not valid for operating in its mode (e.g., an odd-numbered
4715 register for a DFmode). In that case, moves are the only
4716 thing valid, so we can't do a convert from there. This
4717 occurs when the calling sequence allow such misaligned
4718 usages.
4719
4720 In addition, the conversion may involve a call, which could
4721 clobber parameters which haven't been copied to pseudo
4722 registers yet. Therefore, we must first copy the parm to
4723 a pseudo reg here, and save the conversion until after all
4724 parameters have been moved. */
4725
4726 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4727
4728 emit_move_insn (tempreg, validize_mem (entry_parm));
4729
4730 push_to_sequence (conversion_insns);
4731 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4732
4733 /* TREE_USED gets set erroneously during expand_assignment. */
4734 save_tree_used = TREE_USED (parm);
4735 expand_assignment (parm,
4736 make_tree (nominal_type, tempreg), 0, 0);
4737 TREE_USED (parm) = save_tree_used;
4738 conversion_insns = get_insns ();
4739 did_conversion = 1;
4740 end_sequence ();
4741 }
4742 else
4743 emit_move_insn (parmreg, validize_mem (entry_parm));
4744
4745 /* If we were passed a pointer but the actual value
4746 can safely live in a register, put it in one. */
4747 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4748 && ! ((! optimize
4749 && ! DECL_REGISTER (parm)
4750 && ! DECL_INLINE (fndecl))
4751 || TREE_SIDE_EFFECTS (parm)
4752 /* If -ffloat-store specified, don't put explicit
4753 float variables into registers. */
4754 || (flag_float_store
4755 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4756 {
4757 /* We can't use nominal_mode, because it will have been set to
4758 Pmode above. We must use the actual mode of the parm. */
4759 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4760 mark_user_reg (parmreg);
4761 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4762 {
4763 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4764
4765 push_to_sequence (conversion_insns);
4766 emit_move_insn (tempreg, DECL_RTL (parm));
4767 DECL_RTL (parm)
4768 = convert_to_mode (GET_MODE (parmreg), tempreg,
4769 TREE_UNSIGNED (TREE_TYPE (parm)));
4770 emit_move_insn (parmreg, DECL_RTL (parm));
4771 conversion_insns = get_insns();
4772 did_conversion = 1;
4773 end_sequence ();
4774 }
4775 else
4776 emit_move_insn (parmreg, DECL_RTL (parm));
4777 DECL_RTL (parm) = parmreg;
4778 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4779 now the parm. */
4780 stack_parm = 0;
4781 }
4782 #ifdef FUNCTION_ARG_CALLEE_COPIES
4783 /* If we are passed an arg by reference and it is our responsibility
4784 to make a copy, do it now.
4785 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4786 original argument, so we must recreate them in the call to
4787 FUNCTION_ARG_CALLEE_COPIES. */
4788 /* ??? Later add code to handle the case that if the argument isn't
4789 modified, don't do the copy. */
4790
4791 else if (passed_pointer
4792 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4793 TYPE_MODE (DECL_ARG_TYPE (parm)),
4794 DECL_ARG_TYPE (parm),
4795 named_arg)
4796 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4797 {
4798 rtx copy;
4799 tree type = DECL_ARG_TYPE (parm);
4800
4801 /* This sequence may involve a library call perhaps clobbering
4802 registers that haven't been copied to pseudos yet. */
4803
4804 push_to_sequence (conversion_insns);
4805
4806 if (!COMPLETE_TYPE_P (type)
4807 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4808 /* This is a variable sized object. */
4809 copy = gen_rtx_MEM (BLKmode,
4810 allocate_dynamic_stack_space
4811 (expr_size (parm), NULL_RTX,
4812 TYPE_ALIGN (type)));
4813 else
4814 copy = assign_stack_temp (TYPE_MODE (type),
4815 int_size_in_bytes (type), 1);
4816 set_mem_attributes (copy, parm, 1);
4817
4818 store_expr (parm, copy, 0);
4819 emit_move_insn (parmreg, XEXP (copy, 0));
4820 if (current_function_check_memory_usage)
4821 emit_library_call (chkr_set_right_libfunc,
4822 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4823 XEXP (copy, 0), Pmode,
4824 GEN_INT (int_size_in_bytes (type)),
4825 TYPE_MODE (sizetype),
4826 GEN_INT (MEMORY_USE_RW),
4827 TYPE_MODE (integer_type_node));
4828 conversion_insns = get_insns ();
4829 did_conversion = 1;
4830 end_sequence ();
4831 }
4832 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4833
4834 /* In any case, record the parm's desired stack location
4835 in case we later discover it must live in the stack.
4836
4837 If it is a COMPLEX value, store the stack location for both
4838 halves. */
4839
4840 if (GET_CODE (parmreg) == CONCAT)
4841 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4842 else
4843 regno = REGNO (parmreg);
4844
4845 if (regno >= max_parm_reg)
4846 {
4847 rtx *new;
4848 int old_max_parm_reg = max_parm_reg;
4849
4850 /* It's slow to expand this one register at a time,
4851 but it's also rare and we need max_parm_reg to be
4852 precisely correct. */
4853 max_parm_reg = regno + 1;
4854 new = (rtx *) xrealloc (parm_reg_stack_loc,
4855 max_parm_reg * sizeof (rtx));
4856 memset ((char *) (new + old_max_parm_reg), 0,
4857 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4858 parm_reg_stack_loc = new;
4859 }
4860
4861 if (GET_CODE (parmreg) == CONCAT)
4862 {
4863 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4864
4865 regnor = REGNO (gen_realpart (submode, parmreg));
4866 regnoi = REGNO (gen_imagpart (submode, parmreg));
4867
4868 if (stack_parm != 0)
4869 {
4870 parm_reg_stack_loc[regnor]
4871 = gen_realpart (submode, stack_parm);
4872 parm_reg_stack_loc[regnoi]
4873 = gen_imagpart (submode, stack_parm);
4874 }
4875 else
4876 {
4877 parm_reg_stack_loc[regnor] = 0;
4878 parm_reg_stack_loc[regnoi] = 0;
4879 }
4880 }
4881 else
4882 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4883
4884 /* Mark the register as eliminable if we did no conversion
4885 and it was copied from memory at a fixed offset,
4886 and the arg pointer was not copied to a pseudo-reg.
4887 If the arg pointer is a pseudo reg or the offset formed
4888 an invalid address, such memory-equivalences
4889 as we make here would screw up life analysis for it. */
4890 if (nominal_mode == passed_mode
4891 && ! did_conversion
4892 && stack_parm != 0
4893 && GET_CODE (stack_parm) == MEM
4894 && stack_offset.var == 0
4895 && reg_mentioned_p (virtual_incoming_args_rtx,
4896 XEXP (stack_parm, 0)))
4897 {
4898 rtx linsn = get_last_insn ();
4899 rtx sinsn, set;
4900
4901 /* Mark complex types separately. */
4902 if (GET_CODE (parmreg) == CONCAT)
4903 /* Scan backwards for the set of the real and
4904 imaginary parts. */
4905 for (sinsn = linsn; sinsn != 0;
4906 sinsn = prev_nonnote_insn (sinsn))
4907 {
4908 set = single_set (sinsn);
4909 if (set != 0
4910 && SET_DEST (set) == regno_reg_rtx [regnoi])
4911 REG_NOTES (sinsn)
4912 = gen_rtx_EXPR_LIST (REG_EQUIV,
4913 parm_reg_stack_loc[regnoi],
4914 REG_NOTES (sinsn));
4915 else if (set != 0
4916 && SET_DEST (set) == regno_reg_rtx [regnor])
4917 REG_NOTES (sinsn)
4918 = gen_rtx_EXPR_LIST (REG_EQUIV,
4919 parm_reg_stack_loc[regnor],
4920 REG_NOTES (sinsn));
4921 }
4922 else if ((set = single_set (linsn)) != 0
4923 && SET_DEST (set) == parmreg)
4924 REG_NOTES (linsn)
4925 = gen_rtx_EXPR_LIST (REG_EQUIV,
4926 stack_parm, REG_NOTES (linsn));
4927 }
4928
4929 /* For pointer data type, suggest pointer register. */
4930 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4931 mark_reg_pointer (parmreg,
4932 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4933
4934 /* If something wants our address, try to use ADDRESSOF. */
4935 if (TREE_ADDRESSABLE (parm))
4936 {
4937 /* If we end up putting something into the stack,
4938 fixup_var_refs_insns will need to make a pass over
4939 all the instructions. It looks throughs the pending
4940 sequences -- but it can't see the ones in the
4941 CONVERSION_INSNS, if they're not on the sequence
4942 stack. So, we go back to that sequence, just so that
4943 the fixups will happen. */
4944 push_to_sequence (conversion_insns);
4945 put_var_into_stack (parm);
4946 conversion_insns = get_insns ();
4947 end_sequence ();
4948 }
4949 }
4950 else
4951 {
4952 /* Value must be stored in the stack slot STACK_PARM
4953 during function execution. */
4954
4955 if (promoted_mode != nominal_mode)
4956 {
4957 /* Conversion is required. */
4958 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4959
4960 emit_move_insn (tempreg, validize_mem (entry_parm));
4961
4962 push_to_sequence (conversion_insns);
4963 entry_parm = convert_to_mode (nominal_mode, tempreg,
4964 TREE_UNSIGNED (TREE_TYPE (parm)));
4965 if (stack_parm)
4966 {
4967 /* ??? This may need a big-endian conversion on sparc64. */
4968 stack_parm = change_address (stack_parm, nominal_mode,
4969 NULL_RTX);
4970 }
4971 conversion_insns = get_insns ();
4972 did_conversion = 1;
4973 end_sequence ();
4974 }
4975
4976 if (entry_parm != stack_parm)
4977 {
4978 if (stack_parm == 0)
4979 {
4980 stack_parm
4981 = assign_stack_local (GET_MODE (entry_parm),
4982 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4983 set_mem_attributes (stack_parm, parm, 1);
4984 }
4985
4986 if (promoted_mode != nominal_mode)
4987 {
4988 push_to_sequence (conversion_insns);
4989 emit_move_insn (validize_mem (stack_parm),
4990 validize_mem (entry_parm));
4991 conversion_insns = get_insns ();
4992 end_sequence ();
4993 }
4994 else
4995 emit_move_insn (validize_mem (stack_parm),
4996 validize_mem (entry_parm));
4997 }
4998 if (current_function_check_memory_usage)
4999 {
5000 push_to_sequence (conversion_insns);
5001 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
5002 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
5003 GEN_INT (GET_MODE_SIZE (GET_MODE
5004 (entry_parm))),
5005 TYPE_MODE (sizetype),
5006 GEN_INT (MEMORY_USE_RW),
5007 TYPE_MODE (integer_type_node));
5008
5009 conversion_insns = get_insns ();
5010 end_sequence ();
5011 }
5012 DECL_RTL (parm) = stack_parm;
5013 }
5014
5015 /* If this "parameter" was the place where we are receiving the
5016 function's incoming structure pointer, set up the result. */
5017 if (parm == function_result_decl)
5018 {
5019 tree result = DECL_RESULT (fndecl);
5020
5021 DECL_RTL (result)
5022 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
5023
5024 set_mem_attributes (DECL_RTL (result), result, 1);
5025 }
5026 }
5027
5028 /* Output all parameter conversion instructions (possibly including calls)
5029 now that all parameters have been copied out of hard registers. */
5030 emit_insns (conversion_insns);
5031
5032 last_parm_insn = get_last_insn ();
5033
5034 current_function_args_size = stack_args_size.constant;
5035
5036 /* Adjust function incoming argument size for alignment and
5037 minimum length. */
5038
5039 #ifdef REG_PARM_STACK_SPACE
5040 #ifndef MAYBE_REG_PARM_STACK_SPACE
5041 current_function_args_size = MAX (current_function_args_size,
5042 REG_PARM_STACK_SPACE (fndecl));
5043 #endif
5044 #endif
5045
5046 #ifdef STACK_BOUNDARY
5047 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5048
5049 current_function_args_size
5050 = ((current_function_args_size + STACK_BYTES - 1)
5051 / STACK_BYTES) * STACK_BYTES;
5052 #endif
5053
5054 #ifdef ARGS_GROW_DOWNWARD
5055 current_function_arg_offset_rtx
5056 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5057 : expand_expr (size_diffop (stack_args_size.var,
5058 size_int (-stack_args_size.constant)),
5059 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
5060 #else
5061 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5062 #endif
5063
5064 /* See how many bytes, if any, of its args a function should try to pop
5065 on return. */
5066
5067 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5068 current_function_args_size);
5069
5070 /* For stdarg.h function, save info about
5071 regs and stack space used by the named args. */
5072
5073 if (!hide_last_arg)
5074 current_function_args_info = args_so_far;
5075
5076 /* Set the rtx used for the function return value. Put this in its
5077 own variable so any optimizers that need this information don't have
5078 to include tree.h. Do this here so it gets done when an inlined
5079 function gets output. */
5080
5081 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5082 }
5083 \f
5084 /* Indicate whether REGNO is an incoming argument to the current function
5085 that was promoted to a wider mode. If so, return the RTX for the
5086 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5087 that REGNO is promoted from and whether the promotion was signed or
5088 unsigned. */
5089
5090 #ifdef PROMOTE_FUNCTION_ARGS
5091
5092 rtx
5093 promoted_input_arg (regno, pmode, punsignedp)
5094 unsigned int regno;
5095 enum machine_mode *pmode;
5096 int *punsignedp;
5097 {
5098 tree arg;
5099
5100 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5101 arg = TREE_CHAIN (arg))
5102 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5103 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5104 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5105 {
5106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5107 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5108
5109 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5110 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5111 && mode != DECL_MODE (arg))
5112 {
5113 *pmode = DECL_MODE (arg);
5114 *punsignedp = unsignedp;
5115 return DECL_INCOMING_RTL (arg);
5116 }
5117 }
5118
5119 return 0;
5120 }
5121
5122 #endif
5123 \f
5124 /* Compute the size and offset from the start of the stacked arguments for a
5125 parm passed in mode PASSED_MODE and with type TYPE.
5126
5127 INITIAL_OFFSET_PTR points to the current offset into the stacked
5128 arguments.
5129
5130 The starting offset and size for this parm are returned in *OFFSET_PTR
5131 and *ARG_SIZE_PTR, respectively.
5132
5133 IN_REGS is non-zero if the argument will be passed in registers. It will
5134 never be set if REG_PARM_STACK_SPACE is not defined.
5135
5136 FNDECL is the function in which the argument was defined.
5137
5138 There are two types of rounding that are done. The first, controlled by
5139 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5140 list to be aligned to the specific boundary (in bits). This rounding
5141 affects the initial and starting offsets, but not the argument size.
5142
5143 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5144 optionally rounds the size of the parm to PARM_BOUNDARY. The
5145 initial offset is not affected by this rounding, while the size always
5146 is and the starting offset may be. */
5147
5148 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5149 initial_offset_ptr is positive because locate_and_pad_parm's
5150 callers pass in the total size of args so far as
5151 initial_offset_ptr. arg_size_ptr is always positive.*/
5152
5153 void
5154 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5155 initial_offset_ptr, offset_ptr, arg_size_ptr,
5156 alignment_pad)
5157 enum machine_mode passed_mode;
5158 tree type;
5159 int in_regs ATTRIBUTE_UNUSED;
5160 tree fndecl ATTRIBUTE_UNUSED;
5161 struct args_size *initial_offset_ptr;
5162 struct args_size *offset_ptr;
5163 struct args_size *arg_size_ptr;
5164 struct args_size *alignment_pad;
5165
5166 {
5167 tree sizetree
5168 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5169 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5170 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5171
5172 #ifdef REG_PARM_STACK_SPACE
5173 /* If we have found a stack parm before we reach the end of the
5174 area reserved for registers, skip that area. */
5175 if (! in_regs)
5176 {
5177 int reg_parm_stack_space = 0;
5178
5179 #ifdef MAYBE_REG_PARM_STACK_SPACE
5180 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5181 #else
5182 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5183 #endif
5184 if (reg_parm_stack_space > 0)
5185 {
5186 if (initial_offset_ptr->var)
5187 {
5188 initial_offset_ptr->var
5189 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5190 ssize_int (reg_parm_stack_space));
5191 initial_offset_ptr->constant = 0;
5192 }
5193 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5194 initial_offset_ptr->constant = reg_parm_stack_space;
5195 }
5196 }
5197 #endif /* REG_PARM_STACK_SPACE */
5198
5199 arg_size_ptr->var = 0;
5200 arg_size_ptr->constant = 0;
5201 alignment_pad->var = 0;
5202 alignment_pad->constant = 0;
5203
5204 #ifdef ARGS_GROW_DOWNWARD
5205 if (initial_offset_ptr->var)
5206 {
5207 offset_ptr->constant = 0;
5208 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5209 initial_offset_ptr->var);
5210 }
5211 else
5212 {
5213 offset_ptr->constant = -initial_offset_ptr->constant;
5214 offset_ptr->var = 0;
5215 }
5216 if (where_pad != none
5217 && (!host_integerp (sizetree, 1)
5218 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5219 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5220 SUB_PARM_SIZE (*offset_ptr, sizetree);
5221 if (where_pad != downward)
5222 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5223 if (initial_offset_ptr->var)
5224 arg_size_ptr->var = size_binop (MINUS_EXPR,
5225 size_binop (MINUS_EXPR,
5226 ssize_int (0),
5227 initial_offset_ptr->var),
5228 offset_ptr->var);
5229
5230 else
5231 arg_size_ptr->constant = (-initial_offset_ptr->constant
5232 - offset_ptr->constant);
5233
5234 #else /* !ARGS_GROW_DOWNWARD */
5235 if (!in_regs
5236 #ifdef REG_PARM_STACK_SPACE
5237 || REG_PARM_STACK_SPACE (fndecl) > 0
5238 #endif
5239 )
5240 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5241 *offset_ptr = *initial_offset_ptr;
5242
5243 #ifdef PUSH_ROUNDING
5244 if (passed_mode != BLKmode)
5245 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5246 #endif
5247
5248 /* Pad_below needs the pre-rounded size to know how much to pad below
5249 so this must be done before rounding up. */
5250 if (where_pad == downward
5251 /* However, BLKmode args passed in regs have their padding done elsewhere.
5252 The stack slot must be able to hold the entire register. */
5253 && !(in_regs && passed_mode == BLKmode))
5254 pad_below (offset_ptr, passed_mode, sizetree);
5255
5256 if (where_pad != none
5257 && (!host_integerp (sizetree, 1)
5258 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5259 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5260
5261 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5262 #endif /* ARGS_GROW_DOWNWARD */
5263 }
5264
5265 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5266 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5267
5268 static void
5269 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5270 struct args_size *offset_ptr;
5271 int boundary;
5272 struct args_size *alignment_pad;
5273 {
5274 tree save_var = NULL_TREE;
5275 HOST_WIDE_INT save_constant = 0;
5276
5277 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5278
5279 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5280 {
5281 save_var = offset_ptr->var;
5282 save_constant = offset_ptr->constant;
5283 }
5284
5285 alignment_pad->var = NULL_TREE;
5286 alignment_pad->constant = 0;
5287
5288 if (boundary > BITS_PER_UNIT)
5289 {
5290 if (offset_ptr->var)
5291 {
5292 offset_ptr->var =
5293 #ifdef ARGS_GROW_DOWNWARD
5294 round_down
5295 #else
5296 round_up
5297 #endif
5298 (ARGS_SIZE_TREE (*offset_ptr),
5299 boundary / BITS_PER_UNIT);
5300 offset_ptr->constant = 0; /*?*/
5301 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5302 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5303 save_var);
5304 }
5305 else
5306 {
5307 offset_ptr->constant =
5308 #ifdef ARGS_GROW_DOWNWARD
5309 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5310 #else
5311 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5312 #endif
5313 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5314 alignment_pad->constant = offset_ptr->constant - save_constant;
5315 }
5316 }
5317 }
5318
5319 #ifndef ARGS_GROW_DOWNWARD
5320 static void
5321 pad_below (offset_ptr, passed_mode, sizetree)
5322 struct args_size *offset_ptr;
5323 enum machine_mode passed_mode;
5324 tree sizetree;
5325 {
5326 if (passed_mode != BLKmode)
5327 {
5328 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5329 offset_ptr->constant
5330 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5331 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5332 - GET_MODE_SIZE (passed_mode));
5333 }
5334 else
5335 {
5336 if (TREE_CODE (sizetree) != INTEGER_CST
5337 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5338 {
5339 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5340 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5341 /* Add it in. */
5342 ADD_PARM_SIZE (*offset_ptr, s2);
5343 SUB_PARM_SIZE (*offset_ptr, sizetree);
5344 }
5345 }
5346 }
5347 #endif
5348 \f
5349 /* Walk the tree of blocks describing the binding levels within a function
5350 and warn about uninitialized variables.
5351 This is done after calling flow_analysis and before global_alloc
5352 clobbers the pseudo-regs to hard regs. */
5353
5354 void
5355 uninitialized_vars_warning (block)
5356 tree block;
5357 {
5358 register tree decl, sub;
5359 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5360 {
5361 if (warn_uninitialized
5362 && TREE_CODE (decl) == VAR_DECL
5363 /* These warnings are unreliable for and aggregates
5364 because assigning the fields one by one can fail to convince
5365 flow.c that the entire aggregate was initialized.
5366 Unions are troublesome because members may be shorter. */
5367 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5368 && DECL_RTL (decl) != 0
5369 && GET_CODE (DECL_RTL (decl)) == REG
5370 /* Global optimizations can make it difficult to determine if a
5371 particular variable has been initialized. However, a VAR_DECL
5372 with a nonzero DECL_INITIAL had an initializer, so do not
5373 claim it is potentially uninitialized.
5374
5375 We do not care about the actual value in DECL_INITIAL, so we do
5376 not worry that it may be a dangling pointer. */
5377 && DECL_INITIAL (decl) == NULL_TREE
5378 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5379 warning_with_decl (decl,
5380 "`%s' might be used uninitialized in this function");
5381 if (extra_warnings
5382 && TREE_CODE (decl) == VAR_DECL
5383 && DECL_RTL (decl) != 0
5384 && GET_CODE (DECL_RTL (decl)) == REG
5385 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5386 warning_with_decl (decl,
5387 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5388 }
5389 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5390 uninitialized_vars_warning (sub);
5391 }
5392
5393 /* Do the appropriate part of uninitialized_vars_warning
5394 but for arguments instead of local variables. */
5395
5396 void
5397 setjmp_args_warning ()
5398 {
5399 register tree decl;
5400 for (decl = DECL_ARGUMENTS (current_function_decl);
5401 decl; decl = TREE_CHAIN (decl))
5402 if (DECL_RTL (decl) != 0
5403 && GET_CODE (DECL_RTL (decl)) == REG
5404 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5405 warning_with_decl (decl,
5406 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5407 }
5408
5409 /* If this function call setjmp, put all vars into the stack
5410 unless they were declared `register'. */
5411
5412 void
5413 setjmp_protect (block)
5414 tree block;
5415 {
5416 register tree decl, sub;
5417 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5418 if ((TREE_CODE (decl) == VAR_DECL
5419 || TREE_CODE (decl) == PARM_DECL)
5420 && DECL_RTL (decl) != 0
5421 && (GET_CODE (DECL_RTL (decl)) == REG
5422 || (GET_CODE (DECL_RTL (decl)) == MEM
5423 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5424 /* If this variable came from an inline function, it must be
5425 that its life doesn't overlap the setjmp. If there was a
5426 setjmp in the function, it would already be in memory. We
5427 must exclude such variable because their DECL_RTL might be
5428 set to strange things such as virtual_stack_vars_rtx. */
5429 && ! DECL_FROM_INLINE (decl)
5430 && (
5431 #ifdef NON_SAVING_SETJMP
5432 /* If longjmp doesn't restore the registers,
5433 don't put anything in them. */
5434 NON_SAVING_SETJMP
5435 ||
5436 #endif
5437 ! DECL_REGISTER (decl)))
5438 put_var_into_stack (decl);
5439 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5440 setjmp_protect (sub);
5441 }
5442 \f
5443 /* Like the previous function, but for args instead of local variables. */
5444
5445 void
5446 setjmp_protect_args ()
5447 {
5448 register tree decl;
5449 for (decl = DECL_ARGUMENTS (current_function_decl);
5450 decl; decl = TREE_CHAIN (decl))
5451 if ((TREE_CODE (decl) == VAR_DECL
5452 || TREE_CODE (decl) == PARM_DECL)
5453 && DECL_RTL (decl) != 0
5454 && (GET_CODE (DECL_RTL (decl)) == REG
5455 || (GET_CODE (DECL_RTL (decl)) == MEM
5456 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5457 && (
5458 /* If longjmp doesn't restore the registers,
5459 don't put anything in them. */
5460 #ifdef NON_SAVING_SETJMP
5461 NON_SAVING_SETJMP
5462 ||
5463 #endif
5464 ! DECL_REGISTER (decl)))
5465 put_var_into_stack (decl);
5466 }
5467 \f
5468 /* Return the context-pointer register corresponding to DECL,
5469 or 0 if it does not need one. */
5470
5471 rtx
5472 lookup_static_chain (decl)
5473 tree decl;
5474 {
5475 tree context = decl_function_context (decl);
5476 tree link;
5477
5478 if (context == 0
5479 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5480 return 0;
5481
5482 /* We treat inline_function_decl as an alias for the current function
5483 because that is the inline function whose vars, types, etc.
5484 are being merged into the current function.
5485 See expand_inline_function. */
5486 if (context == current_function_decl || context == inline_function_decl)
5487 return virtual_stack_vars_rtx;
5488
5489 for (link = context_display; link; link = TREE_CHAIN (link))
5490 if (TREE_PURPOSE (link) == context)
5491 return RTL_EXPR_RTL (TREE_VALUE (link));
5492
5493 abort ();
5494 }
5495 \f
5496 /* Convert a stack slot address ADDR for variable VAR
5497 (from a containing function)
5498 into an address valid in this function (using a static chain). */
5499
5500 rtx
5501 fix_lexical_addr (addr, var)
5502 rtx addr;
5503 tree var;
5504 {
5505 rtx basereg;
5506 HOST_WIDE_INT displacement;
5507 tree context = decl_function_context (var);
5508 struct function *fp;
5509 rtx base = 0;
5510
5511 /* If this is the present function, we need not do anything. */
5512 if (context == current_function_decl || context == inline_function_decl)
5513 return addr;
5514
5515 for (fp = outer_function_chain; fp; fp = fp->next)
5516 if (fp->decl == context)
5517 break;
5518
5519 if (fp == 0)
5520 abort ();
5521
5522 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5523 addr = XEXP (XEXP (addr, 0), 0);
5524
5525 /* Decode given address as base reg plus displacement. */
5526 if (GET_CODE (addr) == REG)
5527 basereg = addr, displacement = 0;
5528 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5529 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5530 else
5531 abort ();
5532
5533 /* We accept vars reached via the containing function's
5534 incoming arg pointer and via its stack variables pointer. */
5535 if (basereg == fp->internal_arg_pointer)
5536 {
5537 /* If reached via arg pointer, get the arg pointer value
5538 out of that function's stack frame.
5539
5540 There are two cases: If a separate ap is needed, allocate a
5541 slot in the outer function for it and dereference it that way.
5542 This is correct even if the real ap is actually a pseudo.
5543 Otherwise, just adjust the offset from the frame pointer to
5544 compensate. */
5545
5546 #ifdef NEED_SEPARATE_AP
5547 rtx addr;
5548
5549 if (fp->x_arg_pointer_save_area == 0)
5550 fp->x_arg_pointer_save_area
5551 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5552
5553 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5554 addr = memory_address (Pmode, addr);
5555
5556 base = gen_rtx_MEM (Pmode, addr);
5557 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5558 base = copy_to_reg (base);
5559 #else
5560 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5561 base = lookup_static_chain (var);
5562 #endif
5563 }
5564
5565 else if (basereg == virtual_stack_vars_rtx)
5566 {
5567 /* This is the same code as lookup_static_chain, duplicated here to
5568 avoid an extra call to decl_function_context. */
5569 tree link;
5570
5571 for (link = context_display; link; link = TREE_CHAIN (link))
5572 if (TREE_PURPOSE (link) == context)
5573 {
5574 base = RTL_EXPR_RTL (TREE_VALUE (link));
5575 break;
5576 }
5577 }
5578
5579 if (base == 0)
5580 abort ();
5581
5582 /* Use same offset, relative to appropriate static chain or argument
5583 pointer. */
5584 return plus_constant (base, displacement);
5585 }
5586 \f
5587 /* Return the address of the trampoline for entering nested fn FUNCTION.
5588 If necessary, allocate a trampoline (in the stack frame)
5589 and emit rtl to initialize its contents (at entry to this function). */
5590
5591 rtx
5592 trampoline_address (function)
5593 tree function;
5594 {
5595 tree link;
5596 tree rtlexp;
5597 rtx tramp;
5598 struct function *fp;
5599 tree fn_context;
5600
5601 /* Find an existing trampoline and return it. */
5602 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5603 if (TREE_PURPOSE (link) == function)
5604 return
5605 adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5606
5607 for (fp = outer_function_chain; fp; fp = fp->next)
5608 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5609 if (TREE_PURPOSE (link) == function)
5610 {
5611 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5612 function);
5613 return adjust_trampoline_addr (tramp);
5614 }
5615
5616 /* None exists; we must make one. */
5617
5618 /* Find the `struct function' for the function containing FUNCTION. */
5619 fp = 0;
5620 fn_context = decl_function_context (function);
5621 if (fn_context != current_function_decl
5622 && fn_context != inline_function_decl)
5623 for (fp = outer_function_chain; fp; fp = fp->next)
5624 if (fp->decl == fn_context)
5625 break;
5626
5627 /* Allocate run-time space for this trampoline
5628 (usually in the defining function's stack frame). */
5629 #ifdef ALLOCATE_TRAMPOLINE
5630 tramp = ALLOCATE_TRAMPOLINE (fp);
5631 #else
5632 /* If rounding needed, allocate extra space
5633 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5634 #ifdef TRAMPOLINE_ALIGNMENT
5635 #define TRAMPOLINE_REAL_SIZE \
5636 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5637 #else
5638 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5639 #endif
5640 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5641 fp ? fp : cfun);
5642 #endif
5643
5644 /* Record the trampoline for reuse and note it for later initialization
5645 by expand_function_end. */
5646 if (fp != 0)
5647 {
5648 rtlexp = make_node (RTL_EXPR);
5649 RTL_EXPR_RTL (rtlexp) = tramp;
5650 fp->x_trampoline_list = tree_cons (function, rtlexp,
5651 fp->x_trampoline_list);
5652 }
5653 else
5654 {
5655 /* Make the RTL_EXPR node temporary, not momentary, so that the
5656 trampoline_list doesn't become garbage. */
5657 rtlexp = make_node (RTL_EXPR);
5658
5659 RTL_EXPR_RTL (rtlexp) = tramp;
5660 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5661 }
5662
5663 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5664 return adjust_trampoline_addr (tramp);
5665 }
5666
5667 /* Given a trampoline address,
5668 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5669
5670 static rtx
5671 round_trampoline_addr (tramp)
5672 rtx tramp;
5673 {
5674 #ifdef TRAMPOLINE_ALIGNMENT
5675 /* Round address up to desired boundary. */
5676 rtx temp = gen_reg_rtx (Pmode);
5677 temp = expand_binop (Pmode, add_optab, tramp,
5678 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5679 temp, 0, OPTAB_LIB_WIDEN);
5680 tramp = expand_binop (Pmode, and_optab, temp,
5681 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5682 temp, 0, OPTAB_LIB_WIDEN);
5683 #endif
5684 return tramp;
5685 }
5686
5687 /* Given a trampoline address, round it then apply any
5688 platform-specific adjustments so that the result can be used for a
5689 function call . */
5690
5691 static rtx
5692 adjust_trampoline_addr (tramp)
5693 rtx tramp;
5694 {
5695 tramp = round_trampoline_addr (tramp);
5696 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5697 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5698 #endif
5699 return tramp;
5700 }
5701 \f
5702 /* Put all this function's BLOCK nodes including those that are chained
5703 onto the first block into a vector, and return it.
5704 Also store in each NOTE for the beginning or end of a block
5705 the index of that block in the vector.
5706 The arguments are BLOCK, the chain of top-level blocks of the function,
5707 and INSNS, the insn chain of the function. */
5708
5709 void
5710 identify_blocks ()
5711 {
5712 int n_blocks;
5713 tree *block_vector, *last_block_vector;
5714 tree *block_stack;
5715 tree block = DECL_INITIAL (current_function_decl);
5716
5717 if (block == 0)
5718 return;
5719
5720 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5721 depth-first order. */
5722 block_vector = get_block_vector (block, &n_blocks);
5723 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5724
5725 last_block_vector = identify_blocks_1 (get_insns (),
5726 block_vector + 1,
5727 block_vector + n_blocks,
5728 block_stack);
5729
5730 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5731 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5732 if (0 && last_block_vector != block_vector + n_blocks)
5733 abort ();
5734
5735 free (block_vector);
5736 free (block_stack);
5737 }
5738
5739 /* Subroutine of identify_blocks. Do the block substitution on the
5740 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5741
5742 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5743 BLOCK_VECTOR is incremented for each block seen. */
5744
5745 static tree *
5746 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5747 rtx insns;
5748 tree *block_vector;
5749 tree *end_block_vector;
5750 tree *orig_block_stack;
5751 {
5752 rtx insn;
5753 tree *block_stack = orig_block_stack;
5754
5755 for (insn = insns; insn; insn = NEXT_INSN (insn))
5756 {
5757 if (GET_CODE (insn) == NOTE)
5758 {
5759 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5760 {
5761 tree b;
5762
5763 /* If there are more block notes than BLOCKs, something
5764 is badly wrong. */
5765 if (block_vector == end_block_vector)
5766 abort ();
5767
5768 b = *block_vector++;
5769 NOTE_BLOCK (insn) = b;
5770 *block_stack++ = b;
5771 }
5772 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5773 {
5774 /* If there are more NOTE_INSN_BLOCK_ENDs than
5775 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5776 if (block_stack == orig_block_stack)
5777 abort ();
5778
5779 NOTE_BLOCK (insn) = *--block_stack;
5780 }
5781 }
5782 else if (GET_CODE (insn) == CALL_INSN
5783 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5784 {
5785 rtx cp = PATTERN (insn);
5786
5787 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5788 end_block_vector, block_stack);
5789 if (XEXP (cp, 1))
5790 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5791 end_block_vector, block_stack);
5792 if (XEXP (cp, 2))
5793 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5794 end_block_vector, block_stack);
5795 }
5796 }
5797
5798 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5799 something is badly wrong. */
5800 if (block_stack != orig_block_stack)
5801 abort ();
5802
5803 return block_vector;
5804 }
5805
5806 /* Identify BLOCKs referenced by more than one
5807 NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
5808
5809 void
5810 reorder_blocks ()
5811 {
5812 tree block = DECL_INITIAL (current_function_decl);
5813 varray_type block_stack;
5814
5815 if (block == NULL_TREE)
5816 return;
5817
5818 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5819
5820 /* Prune the old trees away, so that they don't get in the way. */
5821 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5822 BLOCK_CHAIN (block) = NULL_TREE;
5823
5824 reorder_blocks_0 (get_insns ());
5825 reorder_blocks_1 (get_insns (), block, &block_stack);
5826
5827 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5828
5829 VARRAY_FREE (block_stack);
5830 }
5831
5832 /* Helper function for reorder_blocks. Process the insn chain beginning
5833 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5834
5835 static void
5836 reorder_blocks_0 (insns)
5837 rtx insns;
5838 {
5839 rtx insn;
5840
5841 for (insn = insns; insn; insn = NEXT_INSN (insn))
5842 {
5843 if (GET_CODE (insn) == NOTE)
5844 {
5845 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5846 {
5847 tree block = NOTE_BLOCK (insn);
5848 TREE_ASM_WRITTEN (block) = 0;
5849 }
5850 }
5851 else if (GET_CODE (insn) == CALL_INSN
5852 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5853 {
5854 rtx cp = PATTERN (insn);
5855 reorder_blocks_0 (XEXP (cp, 0));
5856 if (XEXP (cp, 1))
5857 reorder_blocks_0 (XEXP (cp, 1));
5858 if (XEXP (cp, 2))
5859 reorder_blocks_0 (XEXP (cp, 2));
5860 }
5861 }
5862 }
5863
5864 static void
5865 reorder_blocks_1 (insns, current_block, p_block_stack)
5866 rtx insns;
5867 tree current_block;
5868 varray_type *p_block_stack;
5869 {
5870 rtx insn;
5871
5872 for (insn = insns; insn; insn = NEXT_INSN (insn))
5873 {
5874 if (GET_CODE (insn) == NOTE)
5875 {
5876 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5877 {
5878 tree block = NOTE_BLOCK (insn);
5879 /* If we have seen this block before, copy it. */
5880 if (TREE_ASM_WRITTEN (block))
5881 {
5882 block = copy_node (block);
5883 NOTE_BLOCK (insn) = block;
5884 }
5885 BLOCK_SUBBLOCKS (block) = 0;
5886 TREE_ASM_WRITTEN (block) = 1;
5887 BLOCK_SUPERCONTEXT (block) = current_block;
5888 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5889 BLOCK_SUBBLOCKS (current_block) = block;
5890 current_block = block;
5891 VARRAY_PUSH_TREE (*p_block_stack, block);
5892 }
5893 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5894 {
5895 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5896 VARRAY_POP (*p_block_stack);
5897 BLOCK_SUBBLOCKS (current_block)
5898 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5899 current_block = BLOCK_SUPERCONTEXT (current_block);
5900 }
5901 }
5902 else if (GET_CODE (insn) == CALL_INSN
5903 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5904 {
5905 rtx cp = PATTERN (insn);
5906 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5907 if (XEXP (cp, 1))
5908 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5909 if (XEXP (cp, 2))
5910 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5911 }
5912 }
5913 }
5914
5915 /* Reverse the order of elements in the chain T of blocks,
5916 and return the new head of the chain (old last element). */
5917
5918 static tree
5919 blocks_nreverse (t)
5920 tree t;
5921 {
5922 register tree prev = 0, decl, next;
5923 for (decl = t; decl; decl = next)
5924 {
5925 next = BLOCK_CHAIN (decl);
5926 BLOCK_CHAIN (decl) = prev;
5927 prev = decl;
5928 }
5929 return prev;
5930 }
5931
5932 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5933 non-NULL, list them all into VECTOR, in a depth-first preorder
5934 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5935 blocks. */
5936
5937 static int
5938 all_blocks (block, vector)
5939 tree block;
5940 tree *vector;
5941 {
5942 int n_blocks = 0;
5943
5944 while (block)
5945 {
5946 TREE_ASM_WRITTEN (block) = 0;
5947
5948 /* Record this block. */
5949 if (vector)
5950 vector[n_blocks] = block;
5951
5952 ++n_blocks;
5953
5954 /* Record the subblocks, and their subblocks... */
5955 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5956 vector ? vector + n_blocks : 0);
5957 block = BLOCK_CHAIN (block);
5958 }
5959
5960 return n_blocks;
5961 }
5962
5963 /* Return a vector containing all the blocks rooted at BLOCK. The
5964 number of elements in the vector is stored in N_BLOCKS_P. The
5965 vector is dynamically allocated; it is the caller's responsibility
5966 to call `free' on the pointer returned. */
5967
5968 static tree *
5969 get_block_vector (block, n_blocks_p)
5970 tree block;
5971 int *n_blocks_p;
5972 {
5973 tree *block_vector;
5974
5975 *n_blocks_p = all_blocks (block, NULL);
5976 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5977 all_blocks (block, block_vector);
5978
5979 return block_vector;
5980 }
5981
5982 static int next_block_index = 2;
5983
5984 /* Set BLOCK_NUMBER for all the blocks in FN. */
5985
5986 void
5987 number_blocks (fn)
5988 tree fn;
5989 {
5990 int i;
5991 int n_blocks;
5992 tree *block_vector;
5993
5994 /* For SDB and XCOFF debugging output, we start numbering the blocks
5995 from 1 within each function, rather than keeping a running
5996 count. */
5997 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
5998 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
5999 next_block_index = 1;
6000 #endif
6001
6002 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6003
6004 /* The top-level BLOCK isn't numbered at all. */
6005 for (i = 1; i < n_blocks; ++i)
6006 /* We number the blocks from two. */
6007 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6008
6009 free (block_vector);
6010
6011 return;
6012 }
6013 \f
6014 /* Allocate a function structure and reset its contents to the defaults. */
6015 static void
6016 prepare_function_start ()
6017 {
6018 cfun = (struct function *) xcalloc (1, sizeof (struct function));
6019
6020 init_stmt_for_function ();
6021 init_eh_for_function ();
6022
6023 cse_not_expected = ! optimize;
6024
6025 /* Caller save not needed yet. */
6026 caller_save_needed = 0;
6027
6028 /* No stack slots have been made yet. */
6029 stack_slot_list = 0;
6030
6031 current_function_has_nonlocal_label = 0;
6032 current_function_has_nonlocal_goto = 0;
6033
6034 /* There is no stack slot for handling nonlocal gotos. */
6035 nonlocal_goto_handler_slots = 0;
6036 nonlocal_goto_stack_level = 0;
6037
6038 /* No labels have been declared for nonlocal use. */
6039 nonlocal_labels = 0;
6040 nonlocal_goto_handler_labels = 0;
6041
6042 /* No function calls so far in this function. */
6043 function_call_count = 0;
6044
6045 /* No parm regs have been allocated.
6046 (This is important for output_inline_function.) */
6047 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6048
6049 /* Initialize the RTL mechanism. */
6050 init_emit ();
6051
6052 /* Initialize the queue of pending postincrement and postdecrements,
6053 and some other info in expr.c. */
6054 init_expr ();
6055
6056 /* We haven't done register allocation yet. */
6057 reg_renumber = 0;
6058
6059 init_varasm_status (cfun);
6060
6061 /* Clear out data used for inlining. */
6062 cfun->inlinable = 0;
6063 cfun->original_decl_initial = 0;
6064 cfun->original_arg_vector = 0;
6065
6066 #ifdef STACK_BOUNDARY
6067 cfun->stack_alignment_needed = STACK_BOUNDARY;
6068 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6069 #else
6070 cfun->stack_alignment_needed = 0;
6071 cfun->preferred_stack_boundary = 0;
6072 #endif
6073
6074 /* Set if a call to setjmp is seen. */
6075 current_function_calls_setjmp = 0;
6076
6077 /* Set if a call to longjmp is seen. */
6078 current_function_calls_longjmp = 0;
6079
6080 current_function_calls_alloca = 0;
6081 current_function_contains_functions = 0;
6082 current_function_is_leaf = 0;
6083 current_function_nothrow = 0;
6084 current_function_sp_is_unchanging = 0;
6085 current_function_uses_only_leaf_regs = 0;
6086 current_function_has_computed_jump = 0;
6087 current_function_is_thunk = 0;
6088
6089 current_function_returns_pcc_struct = 0;
6090 current_function_returns_struct = 0;
6091 current_function_epilogue_delay_list = 0;
6092 current_function_uses_const_pool = 0;
6093 current_function_uses_pic_offset_table = 0;
6094 current_function_cannot_inline = 0;
6095
6096 /* We have not yet needed to make a label to jump to for tail-recursion. */
6097 tail_recursion_label = 0;
6098
6099 /* We haven't had a need to make a save area for ap yet. */
6100 arg_pointer_save_area = 0;
6101
6102 /* No stack slots allocated yet. */
6103 frame_offset = 0;
6104
6105 /* No SAVE_EXPRs in this function yet. */
6106 save_expr_regs = 0;
6107
6108 /* No RTL_EXPRs in this function yet. */
6109 rtl_expr_chain = 0;
6110
6111 /* Set up to allocate temporaries. */
6112 init_temp_slots ();
6113
6114 /* Indicate that we need to distinguish between the return value of the
6115 present function and the return value of a function being called. */
6116 rtx_equal_function_value_matters = 1;
6117
6118 /* Indicate that we have not instantiated virtual registers yet. */
6119 virtuals_instantiated = 0;
6120
6121 /* Indicate that we want CONCATs now. */
6122 generating_concat_p = 1;
6123
6124 /* Indicate we have no need of a frame pointer yet. */
6125 frame_pointer_needed = 0;
6126
6127 /* By default assume not varargs or stdarg. */
6128 current_function_varargs = 0;
6129 current_function_stdarg = 0;
6130
6131 /* We haven't made any trampolines for this function yet. */
6132 trampoline_list = 0;
6133
6134 init_pending_stack_adjust ();
6135 inhibit_defer_pop = 0;
6136
6137 current_function_outgoing_args_size = 0;
6138
6139 if (init_lang_status)
6140 (*init_lang_status) (cfun);
6141 if (init_machine_status)
6142 (*init_machine_status) (cfun);
6143 }
6144
6145 /* Initialize the rtl expansion mechanism so that we can do simple things
6146 like generate sequences. This is used to provide a context during global
6147 initialization of some passes. */
6148 void
6149 init_dummy_function_start ()
6150 {
6151 prepare_function_start ();
6152 }
6153
6154 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6155 and initialize static variables for generating RTL for the statements
6156 of the function. */
6157
6158 void
6159 init_function_start (subr, filename, line)
6160 tree subr;
6161 const char *filename;
6162 int line;
6163 {
6164 prepare_function_start ();
6165
6166 /* Remember this function for later. */
6167 cfun->next_global = all_functions;
6168 all_functions = cfun;
6169
6170 current_function_name = (*decl_printable_name) (subr, 2);
6171 cfun->decl = subr;
6172
6173 /* Nonzero if this is a nested function that uses a static chain. */
6174
6175 current_function_needs_context
6176 = (decl_function_context (current_function_decl) != 0
6177 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6178
6179 /* Within function body, compute a type's size as soon it is laid out. */
6180 immediate_size_expand++;
6181
6182 /* Prevent ever trying to delete the first instruction of a function.
6183 Also tell final how to output a linenum before the function prologue.
6184 Note linenums could be missing, e.g. when compiling a Java .class file. */
6185 if (line > 0)
6186 emit_line_note (filename, line);
6187
6188 /* Make sure first insn is a note even if we don't want linenums.
6189 This makes sure the first insn will never be deleted.
6190 Also, final expects a note to appear there. */
6191 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6192
6193 /* Set flags used by final.c. */
6194 if (aggregate_value_p (DECL_RESULT (subr)))
6195 {
6196 #ifdef PCC_STATIC_STRUCT_RETURN
6197 current_function_returns_pcc_struct = 1;
6198 #endif
6199 current_function_returns_struct = 1;
6200 }
6201
6202 /* Warn if this value is an aggregate type,
6203 regardless of which calling convention we are using for it. */
6204 if (warn_aggregate_return
6205 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6206 warning ("function returns an aggregate");
6207
6208 current_function_returns_pointer
6209 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6210 }
6211
6212 /* Make sure all values used by the optimization passes have sane
6213 defaults. */
6214 void
6215 init_function_for_compilation ()
6216 {
6217 reg_renumber = 0;
6218
6219 /* No prologue/epilogue insns yet. */
6220 VARRAY_GROW (prologue, 0);
6221 VARRAY_GROW (epilogue, 0);
6222 VARRAY_GROW (sibcall_epilogue, 0);
6223 }
6224
6225 /* Indicate that the current function uses extra args
6226 not explicitly mentioned in the argument list in any fashion. */
6227
6228 void
6229 mark_varargs ()
6230 {
6231 current_function_varargs = 1;
6232 }
6233
6234 /* Expand a call to __main at the beginning of a possible main function. */
6235
6236 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6237 #undef HAS_INIT_SECTION
6238 #define HAS_INIT_SECTION
6239 #endif
6240
6241 void
6242 expand_main_function ()
6243 {
6244 #if !defined (HAS_INIT_SECTION)
6245 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6246 VOIDmode, 0);
6247 #endif /* not HAS_INIT_SECTION */
6248 }
6249 \f
6250 extern struct obstack permanent_obstack;
6251
6252 /* Start the RTL for a new function, and set variables used for
6253 emitting RTL.
6254 SUBR is the FUNCTION_DECL node.
6255 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6256 the function's parameters, which must be run at any return statement. */
6257
6258 void
6259 expand_function_start (subr, parms_have_cleanups)
6260 tree subr;
6261 int parms_have_cleanups;
6262 {
6263 tree tem;
6264 rtx last_ptr = NULL_RTX;
6265
6266 /* Make sure volatile mem refs aren't considered
6267 valid operands of arithmetic insns. */
6268 init_recog_no_volatile ();
6269
6270 /* Set this before generating any memory accesses. */
6271 current_function_check_memory_usage
6272 = (flag_check_memory_usage
6273 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6274
6275 current_function_instrument_entry_exit
6276 = (flag_instrument_function_entry_exit
6277 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6278
6279 current_function_limit_stack
6280 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6281
6282 /* If function gets a static chain arg, store it in the stack frame.
6283 Do this first, so it gets the first stack slot offset. */
6284 if (current_function_needs_context)
6285 {
6286 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6287
6288 /* Delay copying static chain if it is not a register to avoid
6289 conflicts with regs used for parameters. */
6290 if (! SMALL_REGISTER_CLASSES
6291 || GET_CODE (static_chain_incoming_rtx) == REG)
6292 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6293 }
6294
6295 /* If the parameters of this function need cleaning up, get a label
6296 for the beginning of the code which executes those cleanups. This must
6297 be done before doing anything with return_label. */
6298 if (parms_have_cleanups)
6299 cleanup_label = gen_label_rtx ();
6300 else
6301 cleanup_label = 0;
6302
6303 /* Make the label for return statements to jump to, if this machine
6304 does not have a one-instruction return and uses an epilogue,
6305 or if it returns a structure, or if it has parm cleanups. */
6306 #ifdef HAVE_return
6307 if (cleanup_label == 0 && HAVE_return
6308 && ! current_function_instrument_entry_exit
6309 && ! current_function_returns_pcc_struct
6310 && ! (current_function_returns_struct && ! optimize))
6311 return_label = 0;
6312 else
6313 return_label = gen_label_rtx ();
6314 #else
6315 return_label = gen_label_rtx ();
6316 #endif
6317
6318 /* Initialize rtx used to return the value. */
6319 /* Do this before assign_parms so that we copy the struct value address
6320 before any library calls that assign parms might generate. */
6321
6322 /* Decide whether to return the value in memory or in a register. */
6323 if (aggregate_value_p (DECL_RESULT (subr)))
6324 {
6325 /* Returning something that won't go in a register. */
6326 register rtx value_address = 0;
6327
6328 #ifdef PCC_STATIC_STRUCT_RETURN
6329 if (current_function_returns_pcc_struct)
6330 {
6331 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6332 value_address = assemble_static_space (size);
6333 }
6334 else
6335 #endif
6336 {
6337 /* Expect to be passed the address of a place to store the value.
6338 If it is passed as an argument, assign_parms will take care of
6339 it. */
6340 if (struct_value_incoming_rtx)
6341 {
6342 value_address = gen_reg_rtx (Pmode);
6343 emit_move_insn (value_address, struct_value_incoming_rtx);
6344 }
6345 }
6346 if (value_address)
6347 {
6348 DECL_RTL (DECL_RESULT (subr))
6349 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6350 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6351 DECL_RESULT (subr), 1);
6352 }
6353 }
6354 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6355 /* If return mode is void, this decl rtl should not be used. */
6356 DECL_RTL (DECL_RESULT (subr)) = 0;
6357 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6358 {
6359 /* If function will end with cleanup code for parms,
6360 compute the return values into a pseudo reg,
6361 which we will copy into the true return register
6362 after the cleanups are done. */
6363
6364 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6365
6366 #ifdef PROMOTE_FUNCTION_RETURN
6367 tree type = TREE_TYPE (DECL_RESULT (subr));
6368 int unsignedp = TREE_UNSIGNED (type);
6369
6370 mode = promote_mode (type, mode, &unsignedp, 1);
6371 #endif
6372
6373 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6374 }
6375 else
6376 /* Scalar, returned in a register. */
6377 {
6378 DECL_RTL (DECL_RESULT (subr))
6379 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
6380
6381 /* Mark this reg as the function's return value. */
6382 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6383 {
6384 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6385 /* Needed because we may need to move this to memory
6386 in case it's a named return value whose address is taken. */
6387 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6388 }
6389 }
6390
6391 /* Initialize rtx for parameters and local variables.
6392 In some cases this requires emitting insns. */
6393
6394 assign_parms (subr);
6395
6396 /* Copy the static chain now if it wasn't a register. The delay is to
6397 avoid conflicts with the parameter passing registers. */
6398
6399 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6400 if (GET_CODE (static_chain_incoming_rtx) != REG)
6401 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6402
6403 /* The following was moved from init_function_start.
6404 The move is supposed to make sdb output more accurate. */
6405 /* Indicate the beginning of the function body,
6406 as opposed to parm setup. */
6407 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6408
6409 if (GET_CODE (get_last_insn ()) != NOTE)
6410 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6411 parm_birth_insn = get_last_insn ();
6412
6413 context_display = 0;
6414 if (current_function_needs_context)
6415 {
6416 /* Fetch static chain values for containing functions. */
6417 tem = decl_function_context (current_function_decl);
6418 /* Copy the static chain pointer into a pseudo. If we have
6419 small register classes, copy the value from memory if
6420 static_chain_incoming_rtx is a REG. */
6421 if (tem)
6422 {
6423 /* If the static chain originally came in a register, put it back
6424 there, then move it out in the next insn. The reason for
6425 this peculiar code is to satisfy function integration. */
6426 if (SMALL_REGISTER_CLASSES
6427 && GET_CODE (static_chain_incoming_rtx) == REG)
6428 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6429 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6430 }
6431
6432 while (tem)
6433 {
6434 tree rtlexp = make_node (RTL_EXPR);
6435
6436 RTL_EXPR_RTL (rtlexp) = last_ptr;
6437 context_display = tree_cons (tem, rtlexp, context_display);
6438 tem = decl_function_context (tem);
6439 if (tem == 0)
6440 break;
6441 /* Chain thru stack frames, assuming pointer to next lexical frame
6442 is found at the place we always store it. */
6443 #ifdef FRAME_GROWS_DOWNWARD
6444 last_ptr = plus_constant (last_ptr,
6445 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6446 #endif
6447 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6448 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6449 last_ptr = copy_to_reg (last_ptr);
6450
6451 /* If we are not optimizing, ensure that we know that this
6452 piece of context is live over the entire function. */
6453 if (! optimize)
6454 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6455 save_expr_regs);
6456 }
6457 }
6458
6459 if (current_function_instrument_entry_exit)
6460 {
6461 rtx fun = DECL_RTL (current_function_decl);
6462 if (GET_CODE (fun) == MEM)
6463 fun = XEXP (fun, 0);
6464 else
6465 abort ();
6466 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6467 fun, Pmode,
6468 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6469 0,
6470 hard_frame_pointer_rtx),
6471 Pmode);
6472 }
6473
6474 #ifdef PROFILE_HOOK
6475 if (profile_flag)
6476 PROFILE_HOOK (profile_label_no);
6477 #endif
6478
6479 /* After the display initializations is where the tail-recursion label
6480 should go, if we end up needing one. Ensure we have a NOTE here
6481 since some things (like trampolines) get placed before this. */
6482 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6483
6484 /* Evaluate now the sizes of any types declared among the arguments. */
6485 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6486 {
6487 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6488 EXPAND_MEMORY_USE_BAD);
6489 /* Flush the queue in case this parameter declaration has
6490 side-effects. */
6491 emit_queue ();
6492 }
6493
6494 /* Make sure there is a line number after the function entry setup code. */
6495 force_next_line_note ();
6496 }
6497 \f
6498 /* Undo the effects of init_dummy_function_start. */
6499 void
6500 expand_dummy_function_end ()
6501 {
6502 /* End any sequences that failed to be closed due to syntax errors. */
6503 while (in_sequence_p ())
6504 end_sequence ();
6505
6506 /* Outside function body, can't compute type's actual size
6507 until next function's body starts. */
6508
6509 free_after_parsing (cfun);
6510 free_after_compilation (cfun);
6511 free (cfun);
6512 cfun = 0;
6513 }
6514
6515 /* Call DOIT for each hard register used as a return value from
6516 the current function. */
6517
6518 void
6519 diddle_return_value (doit, arg)
6520 void (*doit) PARAMS ((rtx, void *));
6521 void *arg;
6522 {
6523 rtx outgoing = current_function_return_rtx;
6524 int pcc;
6525
6526 if (! outgoing)
6527 return;
6528
6529 pcc = (current_function_returns_struct
6530 || current_function_returns_pcc_struct);
6531
6532 if ((GET_CODE (outgoing) == REG
6533 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6534 || pcc)
6535 {
6536 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6537
6538 /* A PCC-style return returns a pointer to the memory in which
6539 the structure is stored. */
6540 if (pcc)
6541 type = build_pointer_type (type);
6542
6543 #ifdef FUNCTION_OUTGOING_VALUE
6544 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6545 #else
6546 outgoing = FUNCTION_VALUE (type, current_function_decl);
6547 #endif
6548 /* If this is a BLKmode structure being returned in registers, then use
6549 the mode computed in expand_return. */
6550 if (GET_MODE (outgoing) == BLKmode)
6551 PUT_MODE (outgoing, GET_MODE (current_function_return_rtx));
6552 REG_FUNCTION_VALUE_P (outgoing) = 1;
6553 }
6554
6555 if (GET_CODE (outgoing) == REG)
6556 (*doit) (outgoing, arg);
6557 else if (GET_CODE (outgoing) == PARALLEL)
6558 {
6559 int i;
6560
6561 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6562 {
6563 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6564
6565 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6566 (*doit) (x, arg);
6567 }
6568 }
6569 }
6570
6571 static void
6572 do_clobber_return_reg (reg, arg)
6573 rtx reg;
6574 void *arg ATTRIBUTE_UNUSED;
6575 {
6576 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6577 }
6578
6579 void
6580 clobber_return_register ()
6581 {
6582 diddle_return_value (do_clobber_return_reg, NULL);
6583 }
6584
6585 static void
6586 do_use_return_reg (reg, arg)
6587 rtx reg;
6588 void *arg ATTRIBUTE_UNUSED;
6589 {
6590 emit_insn (gen_rtx_USE (VOIDmode, reg));
6591 }
6592
6593 void
6594 use_return_register ()
6595 {
6596 diddle_return_value (do_use_return_reg, NULL);
6597 }
6598
6599 /* Generate RTL for the end of the current function.
6600 FILENAME and LINE are the current position in the source file.
6601
6602 It is up to language-specific callers to do cleanups for parameters--
6603 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6604
6605 void
6606 expand_function_end (filename, line, end_bindings)
6607 const char *filename;
6608 int line;
6609 int end_bindings;
6610 {
6611 tree link;
6612
6613 #ifdef TRAMPOLINE_TEMPLATE
6614 static rtx initial_trampoline;
6615 #endif
6616
6617 finish_expr_for_function ();
6618
6619 #ifdef NON_SAVING_SETJMP
6620 /* Don't put any variables in registers if we call setjmp
6621 on a machine that fails to restore the registers. */
6622 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6623 {
6624 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6625 setjmp_protect (DECL_INITIAL (current_function_decl));
6626
6627 setjmp_protect_args ();
6628 }
6629 #endif
6630
6631 /* Save the argument pointer if a save area was made for it. */
6632 if (arg_pointer_save_area)
6633 {
6634 /* arg_pointer_save_area may not be a valid memory address, so we
6635 have to check it and fix it if necessary. */
6636 rtx seq;
6637 start_sequence ();
6638 emit_move_insn (validize_mem (arg_pointer_save_area),
6639 virtual_incoming_args_rtx);
6640 seq = gen_sequence ();
6641 end_sequence ();
6642 emit_insn_before (seq, tail_recursion_reentry);
6643 }
6644
6645 /* Initialize any trampolines required by this function. */
6646 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6647 {
6648 tree function = TREE_PURPOSE (link);
6649 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6650 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6651 #ifdef TRAMPOLINE_TEMPLATE
6652 rtx blktramp;
6653 #endif
6654 rtx seq;
6655
6656 #ifdef TRAMPOLINE_TEMPLATE
6657 /* First make sure this compilation has a template for
6658 initializing trampolines. */
6659 if (initial_trampoline == 0)
6660 {
6661 initial_trampoline
6662 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6663
6664 ggc_add_rtx_root (&initial_trampoline, 1);
6665 }
6666 #endif
6667
6668 /* Generate insns to initialize the trampoline. */
6669 start_sequence ();
6670 tramp = round_trampoline_addr (XEXP (tramp, 0));
6671 #ifdef TRAMPOLINE_TEMPLATE
6672 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6673 emit_block_move (blktramp, initial_trampoline,
6674 GEN_INT (TRAMPOLINE_SIZE),
6675 TRAMPOLINE_ALIGNMENT);
6676 #endif
6677 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6678 seq = get_insns ();
6679 end_sequence ();
6680
6681 /* Put those insns at entry to the containing function (this one). */
6682 emit_insns_before (seq, tail_recursion_reentry);
6683 }
6684
6685 /* If we are doing stack checking and this function makes calls,
6686 do a stack probe at the start of the function to ensure we have enough
6687 space for another stack frame. */
6688 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6689 {
6690 rtx insn, seq;
6691
6692 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6693 if (GET_CODE (insn) == CALL_INSN)
6694 {
6695 start_sequence ();
6696 probe_stack_range (STACK_CHECK_PROTECT,
6697 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6698 seq = get_insns ();
6699 end_sequence ();
6700 emit_insns_before (seq, tail_recursion_reentry);
6701 break;
6702 }
6703 }
6704
6705 /* Warn about unused parms if extra warnings were specified. */
6706 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6707 warning. WARN_UNUSED_PARAMETER is negative when set by
6708 -Wunused. */
6709 if (warn_unused_parameter > 0
6710 || (warn_unused_parameter < 0 && extra_warnings))
6711 {
6712 tree decl;
6713
6714 for (decl = DECL_ARGUMENTS (current_function_decl);
6715 decl; decl = TREE_CHAIN (decl))
6716 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6717 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6718 warning_with_decl (decl, "unused parameter `%s'");
6719 }
6720
6721 /* Delete handlers for nonlocal gotos if nothing uses them. */
6722 if (nonlocal_goto_handler_slots != 0
6723 && ! current_function_has_nonlocal_label)
6724 delete_handlers ();
6725
6726 /* End any sequences that failed to be closed due to syntax errors. */
6727 while (in_sequence_p ())
6728 end_sequence ();
6729
6730 /* Outside function body, can't compute type's actual size
6731 until next function's body starts. */
6732 immediate_size_expand--;
6733
6734 clear_pending_stack_adjust ();
6735 do_pending_stack_adjust ();
6736
6737 /* Mark the end of the function body.
6738 If control reaches this insn, the function can drop through
6739 without returning a value. */
6740 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6741
6742 /* Must mark the last line number note in the function, so that the test
6743 coverage code can avoid counting the last line twice. This just tells
6744 the code to ignore the immediately following line note, since there
6745 already exists a copy of this note somewhere above. This line number
6746 note is still needed for debugging though, so we can't delete it. */
6747 if (flag_test_coverage)
6748 emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
6749
6750 /* Output a linenumber for the end of the function.
6751 SDB depends on this. */
6752 emit_line_note_force (filename, line);
6753
6754 /* Output the label for the actual return from the function,
6755 if one is expected. This happens either because a function epilogue
6756 is used instead of a return instruction, or because a return was done
6757 with a goto in order to run local cleanups, or because of pcc-style
6758 structure returning. */
6759
6760 if (return_label)
6761 {
6762 rtx before, after;
6763
6764 /* Before the return label, clobber the return registers so that
6765 they are not propogated live to the rest of the function. This
6766 can only happen with functions that drop through; if there had
6767 been a return statement, there would have either been a return
6768 rtx, or a jump to the return label. */
6769
6770 before = get_last_insn ();
6771 clobber_return_register ();
6772 after = get_last_insn ();
6773
6774 if (before != after)
6775 cfun->x_clobber_return_insn = after;
6776
6777 emit_label (return_label);
6778 }
6779
6780 /* C++ uses this. */
6781 if (end_bindings)
6782 expand_end_bindings (0, 0, 0);
6783
6784 /* Now handle any leftover exception regions that may have been
6785 created for the parameters. */
6786 {
6787 rtx last = get_last_insn ();
6788 rtx label;
6789
6790 expand_leftover_cleanups ();
6791
6792 /* If there are any catch_clauses remaining, output them now. */
6793 emit_insns (catch_clauses);
6794 catch_clauses = catch_clauses_last = NULL_RTX;
6795 /* If the above emitted any code, may sure we jump around it. */
6796 if (last != get_last_insn ())
6797 {
6798 label = gen_label_rtx ();
6799 last = emit_jump_insn_after (gen_jump (label), last);
6800 last = emit_barrier_after (last);
6801 emit_label (label);
6802 }
6803 }
6804
6805 if (current_function_instrument_entry_exit)
6806 {
6807 rtx fun = DECL_RTL (current_function_decl);
6808 if (GET_CODE (fun) == MEM)
6809 fun = XEXP (fun, 0);
6810 else
6811 abort ();
6812 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6813 fun, Pmode,
6814 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6815 0,
6816 hard_frame_pointer_rtx),
6817 Pmode);
6818 }
6819
6820 /* If we had calls to alloca, and this machine needs
6821 an accurate stack pointer to exit the function,
6822 insert some code to save and restore the stack pointer. */
6823 #ifdef EXIT_IGNORE_STACK
6824 if (! EXIT_IGNORE_STACK)
6825 #endif
6826 if (current_function_calls_alloca)
6827 {
6828 rtx tem = 0;
6829
6830 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6831 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6832 }
6833
6834 /* If scalar return value was computed in a pseudo-reg, or was a named
6835 return value that got dumped to the stack, copy that to the hard
6836 return register. */
6837 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0)
6838 {
6839 tree decl_result = DECL_RESULT (current_function_decl);
6840 rtx decl_rtl = DECL_RTL (decl_result);
6841
6842 if (REG_P (decl_rtl)
6843 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6844 : DECL_REGISTER (decl_result))
6845 {
6846 rtx real_decl_rtl;
6847
6848 #ifdef FUNCTION_OUTGOING_VALUE
6849 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6850 current_function_decl);
6851 #else
6852 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6853 current_function_decl);
6854 #endif
6855 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6856
6857 /* If this is a BLKmode structure being returned in registers,
6858 then use the mode computed in expand_return. Note that if
6859 decl_rtl is memory, then its mode may have been changed,
6860 but that current_function_return_rtx has not. */
6861 if (GET_MODE (real_decl_rtl) == BLKmode)
6862 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6863
6864 /* If a named return value dumped decl_return to memory, then
6865 we may need to re-do the PROMOTE_MODE signed/unsigned
6866 extension. */
6867 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6868 {
6869 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6870
6871 #ifdef PROMOTE_FUNCTION_RETURN
6872 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6873 &unsignedp, 1);
6874 #endif
6875
6876 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6877 }
6878 else
6879 emit_move_insn (real_decl_rtl, decl_rtl);
6880
6881 /* The delay slot scheduler assumes that current_function_return_rtx
6882 holds the hard register containing the return value, not a
6883 temporary pseudo. */
6884 current_function_return_rtx = real_decl_rtl;
6885 }
6886 }
6887
6888 /* If returning a structure, arrange to return the address of the value
6889 in a place where debuggers expect to find it.
6890
6891 If returning a structure PCC style,
6892 the caller also depends on this value.
6893 And current_function_returns_pcc_struct is not necessarily set. */
6894 if (current_function_returns_struct
6895 || current_function_returns_pcc_struct)
6896 {
6897 rtx value_address
6898 = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6899 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6900 #ifdef FUNCTION_OUTGOING_VALUE
6901 rtx outgoing
6902 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6903 current_function_decl);
6904 #else
6905 rtx outgoing
6906 = FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
6907 #endif
6908
6909 /* Mark this as a function return value so integrate will delete the
6910 assignment and USE below when inlining this function. */
6911 REG_FUNCTION_VALUE_P (outgoing) = 1;
6912
6913 #ifdef POINTERS_EXTEND_UNSIGNED
6914 /* The address may be ptr_mode and OUTGOING may be Pmode. */
6915 if (GET_MODE (outgoing) != GET_MODE (value_address))
6916 value_address = convert_memory_address (GET_MODE (outgoing),
6917 value_address);
6918 #endif
6919
6920 emit_move_insn (outgoing, value_address);
6921
6922 /* Show return register used to hold result (in this case the address
6923 of the result. */
6924 current_function_return_rtx = outgoing;
6925 }
6926
6927 /* ??? This should no longer be necessary since stupid is no longer with
6928 us, but there are some parts of the compiler (eg reload_combine, and
6929 sh mach_dep_reorg) that still try and compute their own lifetime info
6930 instead of using the general framework. */
6931 use_return_register ();
6932
6933 /* If this is an implementation of __throw, do what's necessary to
6934 communicate between __builtin_eh_return and the epilogue. */
6935 expand_eh_return ();
6936
6937 /* Output a return insn if we are using one.
6938 Otherwise, let the rtl chain end here, to drop through
6939 into the epilogue. */
6940
6941 #ifdef HAVE_return
6942 if (HAVE_return)
6943 {
6944 emit_jump_insn (gen_return ());
6945 emit_barrier ();
6946 }
6947 #endif
6948
6949 /* Fix up any gotos that jumped out to the outermost
6950 binding level of the function.
6951 Must follow emitting RETURN_LABEL. */
6952
6953 /* If you have any cleanups to do at this point,
6954 and they need to create temporary variables,
6955 then you will lose. */
6956 expand_fixups (get_insns ());
6957 }
6958 \f
6959 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6960 sequence or a single insn). */
6961
6962 static void
6963 record_insns (insns, vecp)
6964 rtx insns;
6965 varray_type *vecp;
6966 {
6967 if (GET_CODE (insns) == SEQUENCE)
6968 {
6969 int len = XVECLEN (insns, 0);
6970 int i = VARRAY_SIZE (*vecp);
6971
6972 VARRAY_GROW (*vecp, i + len);
6973 while (--len >= 0)
6974 {
6975 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6976 ++i;
6977 }
6978 }
6979 else
6980 {
6981 int i = VARRAY_SIZE (*vecp);
6982 VARRAY_GROW (*vecp, i + 1);
6983 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6984 }
6985 }
6986
6987 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6988
6989 static int
6990 contains (insn, vec)
6991 rtx insn;
6992 varray_type vec;
6993 {
6994 register int i, j;
6995
6996 if (GET_CODE (insn) == INSN
6997 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6998 {
6999 int count = 0;
7000 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7001 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7002 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7003 count++;
7004 return count;
7005 }
7006 else
7007 {
7008 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7009 if (INSN_UID (insn) == VARRAY_INT (vec, j))
7010 return 1;
7011 }
7012 return 0;
7013 }
7014
7015 int
7016 prologue_epilogue_contains (insn)
7017 rtx insn;
7018 {
7019 if (contains (insn, prologue))
7020 return 1;
7021 if (contains (insn, epilogue))
7022 return 1;
7023 return 0;
7024 }
7025
7026 int
7027 sibcall_epilogue_contains (insn)
7028 rtx insn;
7029 {
7030 if (sibcall_epilogue)
7031 return contains (insn, sibcall_epilogue);
7032 return 0;
7033 }
7034
7035 #ifdef HAVE_return
7036 /* Insert gen_return at the end of block BB. This also means updating
7037 block_for_insn appropriately. */
7038
7039 static void
7040 emit_return_into_block (bb, line_note)
7041 basic_block bb;
7042 rtx line_note;
7043 {
7044 rtx p, end;
7045
7046 p = NEXT_INSN (bb->end);
7047 end = emit_jump_insn_after (gen_return (), bb->end);
7048 if (line_note)
7049 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7050 NOTE_LINE_NUMBER (line_note), bb->end);
7051
7052 while (1)
7053 {
7054 set_block_for_insn (p, bb);
7055 if (p == bb->end)
7056 break;
7057 p = PREV_INSN (p);
7058 }
7059 bb->end = end;
7060 }
7061 #endif /* HAVE_return */
7062
7063 #ifdef HAVE_epilogue
7064
7065 /* Modify SEQ, a SEQUENCE that is part of the epilogue, to no modifications
7066 to the stack pointer. */
7067
7068 static void
7069 keep_stack_depressed (seq)
7070 rtx seq;
7071 {
7072 int i;
7073 rtx sp_from_reg = 0;
7074 int sp_modified_unknown = 0;
7075
7076 /* If the epilogue is just a single instruction, it's OK as is */
7077
7078 if (GET_CODE (seq) != SEQUENCE)
7079 return;
7080
7081 /* Scan all insns in SEQ looking for ones that modified the stack
7082 pointer. Record if it modified the stack pointer by copying it
7083 from the frame pointer or if it modified it in some other way.
7084 Then modify any subsequent stack pointer references to take that
7085 into account. We start by only allowing SP to be copied from a
7086 register (presumably FP) and then be subsequently referenced. */
7087
7088 for (i = 0; i < XVECLEN (seq, 0); i++)
7089 {
7090 rtx insn = XVECEXP (seq, 0, i);
7091
7092 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7093 continue;
7094
7095 if (reg_set_p (stack_pointer_rtx, insn))
7096 {
7097 rtx set = single_set (insn);
7098
7099 /* If SP is set as a side-effect, we can't support this. */
7100 if (set == 0)
7101 abort ();
7102
7103 if (GET_CODE (SET_SRC (set)) == REG)
7104 sp_from_reg = SET_SRC (set);
7105 else
7106 sp_modified_unknown = 1;
7107
7108 /* Don't allow the SP modification to happen. */
7109 PUT_CODE (insn, NOTE);
7110 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7111 NOTE_SOURCE_FILE (insn) = 0;
7112 }
7113 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
7114 {
7115 if (sp_modified_unknown)
7116 abort ();
7117
7118 else if (sp_from_reg != 0)
7119 PATTERN (insn)
7120 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
7121 }
7122 }
7123 }
7124 #endif
7125
7126 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
7127 this into place with notes indicating where the prologue ends and where
7128 the epilogue begins. Update the basic block information when possible. */
7129
7130 void
7131 thread_prologue_and_epilogue_insns (f)
7132 rtx f ATTRIBUTE_UNUSED;
7133 {
7134 int inserted = 0;
7135 edge e;
7136 rtx seq;
7137 #ifdef HAVE_prologue
7138 rtx prologue_end = NULL_RTX;
7139 #endif
7140 #if defined (HAVE_epilogue) || defined(HAVE_return)
7141 rtx epilogue_end = NULL_RTX;
7142 #endif
7143
7144 #ifdef HAVE_prologue
7145 if (HAVE_prologue)
7146 {
7147 start_sequence ();
7148 seq = gen_prologue ();
7149 emit_insn (seq);
7150
7151 /* Retain a map of the prologue insns. */
7152 if (GET_CODE (seq) != SEQUENCE)
7153 seq = get_insns ();
7154 record_insns (seq, &prologue);
7155 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7156
7157 seq = gen_sequence ();
7158 end_sequence ();
7159
7160 /* If optimization is off, and perhaps in an empty function,
7161 the entry block will have no successors. */
7162 if (ENTRY_BLOCK_PTR->succ)
7163 {
7164 /* Can't deal with multiple successsors of the entry block. */
7165 if (ENTRY_BLOCK_PTR->succ->succ_next)
7166 abort ();
7167
7168 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7169 inserted = 1;
7170 }
7171 else
7172 emit_insn_after (seq, f);
7173 }
7174 #endif
7175
7176 /* If the exit block has no non-fake predecessors, we don't need
7177 an epilogue. */
7178 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7179 if ((e->flags & EDGE_FAKE) == 0)
7180 break;
7181 if (e == NULL)
7182 goto epilogue_done;
7183
7184 #ifdef HAVE_return
7185 if (optimize && HAVE_return)
7186 {
7187 /* If we're allowed to generate a simple return instruction,
7188 then by definition we don't need a full epilogue. Examine
7189 the block that falls through to EXIT. If it does not
7190 contain any code, examine its predecessors and try to
7191 emit (conditional) return instructions. */
7192
7193 basic_block last;
7194 edge e_next;
7195 rtx label;
7196
7197 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7198 if (e->flags & EDGE_FALLTHRU)
7199 break;
7200 if (e == NULL)
7201 goto epilogue_done;
7202 last = e->src;
7203
7204 /* Verify that there are no active instructions in the last block. */
7205 label = last->end;
7206 while (label && GET_CODE (label) != CODE_LABEL)
7207 {
7208 if (active_insn_p (label))
7209 break;
7210 label = PREV_INSN (label);
7211 }
7212
7213 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7214 {
7215 rtx epilogue_line_note = NULL_RTX;
7216
7217 /* Locate the line number associated with the closing brace,
7218 if we can find one. */
7219 for (seq = get_last_insn ();
7220 seq && ! active_insn_p (seq);
7221 seq = PREV_INSN (seq))
7222 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7223 {
7224 epilogue_line_note = seq;
7225 break;
7226 }
7227
7228 for (e = last->pred; e; e = e_next)
7229 {
7230 basic_block bb = e->src;
7231 rtx jump;
7232
7233 e_next = e->pred_next;
7234 if (bb == ENTRY_BLOCK_PTR)
7235 continue;
7236
7237 jump = bb->end;
7238 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7239 continue;
7240
7241 /* If we have an unconditional jump, we can replace that
7242 with a simple return instruction. */
7243 if (simplejump_p (jump))
7244 {
7245 emit_return_into_block (bb, epilogue_line_note);
7246 flow_delete_insn (jump);
7247 }
7248
7249 /* If we have a conditional jump, we can try to replace
7250 that with a conditional return instruction. */
7251 else if (condjump_p (jump))
7252 {
7253 rtx ret, *loc;
7254
7255 ret = SET_SRC (PATTERN (jump));
7256 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7257 loc = &XEXP (ret, 1);
7258 else
7259 loc = &XEXP (ret, 2);
7260 ret = gen_rtx_RETURN (VOIDmode);
7261
7262 if (! validate_change (jump, loc, ret, 0))
7263 continue;
7264 if (JUMP_LABEL (jump))
7265 LABEL_NUSES (JUMP_LABEL (jump))--;
7266
7267 /* If this block has only one successor, it both jumps
7268 and falls through to the fallthru block, so we can't
7269 delete the edge. */
7270 if (bb->succ->succ_next == NULL)
7271 continue;
7272 }
7273 else
7274 continue;
7275
7276 /* Fix up the CFG for the successful change we just made. */
7277 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7278 }
7279
7280 /* Emit a return insn for the exit fallthru block. Whether
7281 this is still reachable will be determined later. */
7282
7283 emit_barrier_after (last->end);
7284 emit_return_into_block (last, epilogue_line_note);
7285 epilogue_end = last->end;
7286 goto epilogue_done;
7287 }
7288 }
7289 #endif
7290 #ifdef HAVE_epilogue
7291 if (HAVE_epilogue)
7292 {
7293 /* Find the edge that falls through to EXIT. Other edges may exist
7294 due to RETURN instructions, but those don't need epilogues.
7295 There really shouldn't be a mixture -- either all should have
7296 been converted or none, however... */
7297
7298 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7299 if (e->flags & EDGE_FALLTHRU)
7300 break;
7301 if (e == NULL)
7302 goto epilogue_done;
7303
7304 start_sequence ();
7305 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7306
7307 seq = gen_epilogue ();
7308
7309 /* If this function returns with the stack depressed, massage
7310 the epilogue to actually do that. */
7311 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7312 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7313 keep_stack_depressed (seq);
7314
7315 emit_jump_insn (seq);
7316
7317 /* Retain a map of the epilogue insns. */
7318 if (GET_CODE (seq) != SEQUENCE)
7319 seq = get_insns ();
7320 record_insns (seq, &epilogue);
7321
7322 seq = gen_sequence ();
7323 end_sequence ();
7324
7325 insert_insn_on_edge (seq, e);
7326 inserted = 1;
7327 }
7328 #endif
7329 epilogue_done:
7330
7331 if (inserted)
7332 commit_edge_insertions ();
7333
7334 #ifdef HAVE_sibcall_epilogue
7335 /* Emit sibling epilogues before any sibling call sites. */
7336 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7337 {
7338 basic_block bb = e->src;
7339 rtx insn = bb->end;
7340 rtx i;
7341 rtx newinsn;
7342
7343 if (GET_CODE (insn) != CALL_INSN
7344 || ! SIBLING_CALL_P (insn))
7345 continue;
7346
7347 start_sequence ();
7348 seq = gen_sibcall_epilogue ();
7349 end_sequence ();
7350
7351 i = PREV_INSN (insn);
7352 newinsn = emit_insn_before (seq, insn);
7353
7354 /* Update the UID to basic block map. */
7355 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7356 set_block_for_insn (i, bb);
7357
7358 /* Retain a map of the epilogue insns. Used in life analysis to
7359 avoid getting rid of sibcall epilogue insns. */
7360 record_insns (GET_CODE (seq) == SEQUENCE
7361 ? seq : newinsn, &sibcall_epilogue);
7362 }
7363 #endif
7364
7365 #ifdef HAVE_prologue
7366 if (prologue_end)
7367 {
7368 rtx insn, prev;
7369
7370 /* GDB handles `break f' by setting a breakpoint on the first
7371 line note after the prologue. Which means (1) that if
7372 there are line number notes before where we inserted the
7373 prologue we should move them, and (2) we should generate a
7374 note before the end of the first basic block, if there isn't
7375 one already there. */
7376
7377 for (insn = prologue_end; insn; insn = prev)
7378 {
7379 prev = PREV_INSN (insn);
7380 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7381 {
7382 /* Note that we cannot reorder the first insn in the
7383 chain, since rest_of_compilation relies on that
7384 remaining constant. */
7385 if (prev == NULL)
7386 break;
7387 reorder_insns (insn, insn, prologue_end);
7388 }
7389 }
7390
7391 /* Find the last line number note in the first block. */
7392 for (insn = BASIC_BLOCK (0)->end;
7393 insn != prologue_end;
7394 insn = PREV_INSN (insn))
7395 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7396 break;
7397
7398 /* If we didn't find one, make a copy of the first line number
7399 we run across. */
7400 if (! insn)
7401 {
7402 for (insn = next_active_insn (prologue_end);
7403 insn;
7404 insn = PREV_INSN (insn))
7405 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7406 {
7407 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7408 NOTE_LINE_NUMBER (insn),
7409 prologue_end);
7410 break;
7411 }
7412 }
7413 }
7414 #endif
7415 #ifdef HAVE_epilogue
7416 if (epilogue_end)
7417 {
7418 rtx insn, next;
7419
7420 /* Similarly, move any line notes that appear after the epilogue.
7421 There is no need, however, to be quite so anal about the existance
7422 of such a note. */
7423 for (insn = epilogue_end; insn; insn = next)
7424 {
7425 next = NEXT_INSN (insn);
7426 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7427 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7428 }
7429 }
7430 #endif
7431 }
7432
7433 /* Reposition the prologue-end and epilogue-begin notes after instruction
7434 scheduling and delayed branch scheduling. */
7435
7436 void
7437 reposition_prologue_and_epilogue_notes (f)
7438 rtx f ATTRIBUTE_UNUSED;
7439 {
7440 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7441 int len;
7442
7443 if ((len = VARRAY_SIZE (prologue)) > 0)
7444 {
7445 register rtx insn, note = 0;
7446
7447 /* Scan from the beginning until we reach the last prologue insn.
7448 We apparently can't depend on basic_block_{head,end} after
7449 reorg has run. */
7450 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7451 {
7452 if (GET_CODE (insn) == NOTE)
7453 {
7454 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7455 note = insn;
7456 }
7457 else if ((len -= contains (insn, prologue)) == 0)
7458 {
7459 rtx next;
7460 /* Find the prologue-end note if we haven't already, and
7461 move it to just after the last prologue insn. */
7462 if (note == 0)
7463 {
7464 for (note = insn; (note = NEXT_INSN (note));)
7465 if (GET_CODE (note) == NOTE
7466 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7467 break;
7468 }
7469
7470 next = NEXT_INSN (note);
7471
7472 /* Whether or not we can depend on BLOCK_HEAD,
7473 attempt to keep it up-to-date. */
7474 if (BLOCK_HEAD (0) == note)
7475 BLOCK_HEAD (0) = next;
7476
7477 remove_insn (note);
7478 add_insn_after (note, insn);
7479 }
7480 }
7481 }
7482
7483 if ((len = VARRAY_SIZE (epilogue)) > 0)
7484 {
7485 register rtx insn, note = 0;
7486
7487 /* Scan from the end until we reach the first epilogue insn.
7488 We apparently can't depend on basic_block_{head,end} after
7489 reorg has run. */
7490 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7491 {
7492 if (GET_CODE (insn) == NOTE)
7493 {
7494 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7495 note = insn;
7496 }
7497 else if ((len -= contains (insn, epilogue)) == 0)
7498 {
7499 /* Find the epilogue-begin note if we haven't already, and
7500 move it to just before the first epilogue insn. */
7501 if (note == 0)
7502 {
7503 for (note = insn; (note = PREV_INSN (note));)
7504 if (GET_CODE (note) == NOTE
7505 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7506 break;
7507 }
7508
7509 /* Whether or not we can depend on BLOCK_HEAD,
7510 attempt to keep it up-to-date. */
7511 if (n_basic_blocks
7512 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7513 BLOCK_HEAD (n_basic_blocks-1) = note;
7514
7515 remove_insn (note);
7516 add_insn_before (note, insn);
7517 }
7518 }
7519 }
7520 #endif /* HAVE_prologue or HAVE_epilogue */
7521 }
7522
7523 /* Mark T for GC. */
7524
7525 static void
7526 mark_temp_slot (t)
7527 struct temp_slot *t;
7528 {
7529 while (t)
7530 {
7531 ggc_mark_rtx (t->slot);
7532 ggc_mark_rtx (t->address);
7533 ggc_mark_tree (t->rtl_expr);
7534 ggc_mark_tree (t->type);
7535
7536 t = t->next;
7537 }
7538 }
7539
7540 /* Mark P for GC. */
7541
7542 static void
7543 mark_function_status (p)
7544 struct function *p;
7545 {
7546 int i;
7547 rtx *r;
7548
7549 if (p == 0)
7550 return;
7551
7552 ggc_mark_rtx (p->arg_offset_rtx);
7553
7554 if (p->x_parm_reg_stack_loc)
7555 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7556 i > 0; --i, ++r)
7557 ggc_mark_rtx (*r);
7558
7559 ggc_mark_rtx (p->return_rtx);
7560 ggc_mark_rtx (p->x_cleanup_label);
7561 ggc_mark_rtx (p->x_return_label);
7562 ggc_mark_rtx (p->x_save_expr_regs);
7563 ggc_mark_rtx (p->x_stack_slot_list);
7564 ggc_mark_rtx (p->x_parm_birth_insn);
7565 ggc_mark_rtx (p->x_tail_recursion_label);
7566 ggc_mark_rtx (p->x_tail_recursion_reentry);
7567 ggc_mark_rtx (p->internal_arg_pointer);
7568 ggc_mark_rtx (p->x_arg_pointer_save_area);
7569 ggc_mark_tree (p->x_rtl_expr_chain);
7570 ggc_mark_rtx (p->x_last_parm_insn);
7571 ggc_mark_tree (p->x_context_display);
7572 ggc_mark_tree (p->x_trampoline_list);
7573 ggc_mark_rtx (p->epilogue_delay_list);
7574 ggc_mark_rtx (p->x_clobber_return_insn);
7575
7576 mark_temp_slot (p->x_temp_slots);
7577
7578 {
7579 struct var_refs_queue *q = p->fixup_var_refs_queue;
7580 while (q)
7581 {
7582 ggc_mark_rtx (q->modified);
7583 q = q->next;
7584 }
7585 }
7586
7587 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7588 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7589 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7590 ggc_mark_tree (p->x_nonlocal_labels);
7591 }
7592
7593 /* Mark the function chain ARG (which is really a struct function **)
7594 for GC. */
7595
7596 static void
7597 mark_function_chain (arg)
7598 void *arg;
7599 {
7600 struct function *f = *(struct function **) arg;
7601
7602 for (; f; f = f->next_global)
7603 {
7604 ggc_mark_tree (f->decl);
7605
7606 mark_function_status (f);
7607 mark_eh_status (f->eh);
7608 mark_stmt_status (f->stmt);
7609 mark_expr_status (f->expr);
7610 mark_emit_status (f->emit);
7611 mark_varasm_status (f->varasm);
7612
7613 if (mark_machine_status)
7614 (*mark_machine_status) (f);
7615 if (mark_lang_status)
7616 (*mark_lang_status) (f);
7617
7618 if (f->original_arg_vector)
7619 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7620 if (f->original_decl_initial)
7621 ggc_mark_tree (f->original_decl_initial);
7622 }
7623 }
7624
7625 /* Called once, at initialization, to initialize function.c. */
7626
7627 void
7628 init_function_once ()
7629 {
7630 ggc_add_root (&all_functions, 1, sizeof all_functions,
7631 mark_function_chain);
7632
7633 VARRAY_INT_INIT (prologue, 0, "prologue");
7634 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7635 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7636 }
This page took 0.375991 seconds and 6 git commands to generate.