gcc/function.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
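/* [Editorial sketch, not part of the original file.]  The comment above
   names the two stack-slot entry points exported here; the fragment below
   shows the intended calling pattern.  `local_decl' is a hypothetical
   VAR_DECL, and the surrounding expansion context (current_function_decl,
   the open RTL sequence, etc.) is assumed to have been set up by the front
   end via expand_function_start.  Only routines defined in this file are
   called with explicit arguments.  */
#if 0
  /* A fixed-size local that must live on the stack: ALIGN of 0 means
     "align according to the mode" (see assign_stack_local below).  */
  rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

  /* Later we learn that LOCAL_DECL, currently held in a pseudo, has had
     its address taken; move it to a stack slot and rewrite the RTL
     generated so far.  */
  put_var_into_stack (local_decl);
#endif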
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62
63 #ifndef ACCUMULATE_OUTGOING_ARGS
64 #define ACCUMULATE_OUTGOING_ARGS 0
65 #endif
66
67 #ifndef TRAMPOLINE_ALIGNMENT
68 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
69 #endif
70
71 #ifndef LOCAL_ALIGNMENT
72 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
73 #endif
74
75 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
76 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
77 #endif
78
79 /* Some systems use __main in a way incompatible with its use in gcc; in these
80 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
81 give the same symbol without quotes for an alternative entry point. You
82 must define both, or neither. */
83 #ifndef NAME__MAIN
84 #define NAME__MAIN "__main"
85 #define SYMBOL__MAIN __main
86 #endif
87
88 /* Round a value down to the largest multiple of the required alignment
89    that does not exceed it.  Avoid using division in case the value is
90    negative.  Assume the alignment is a power of two.  */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
92
93 /* Similar, but round to the next highest integer that meets the
94 alignment. */
95 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
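/* [Editorial worked example.]  With ALIGN == 8:
     CEIL_ROUND (37, 8)   == (37 + 7) & ~7 == 44 & ~7 == 40
     FLOOR_ROUND (37, 8)  == 37 & ~7       == 32
     FLOOR_ROUND (-37, 8) == -40 in two's complement, since the AND simply
       clears the low three bits.  A division-based rounding of a negative
       value would be implementation-defined in pre-C99 C, which is why the
       mask form is used.  */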
96
97 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
98 during rtl generation. If they are different register numbers, this is
99 always true. It may also be true if
100 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
101 generation. See fix_lexical_addr for details. */
102
103 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
104 #define NEED_SEPARATE_AP
105 #endif
106
107 /* Nonzero if function being compiled doesn't contain any calls
108 (ignoring the prologue and epilogue). This is set prior to
109 local register allocation and is valid for the remaining
110 compiler passes. */
111 int current_function_is_leaf;
112
113 /* Nonzero if function being compiled doesn't contain any instructions
114 that can throw an exception. This is set prior to final. */
115
116 int current_function_nothrow;
117
118 /* Nonzero if function being compiled doesn't modify the stack pointer
119 (ignoring the prologue and epilogue). This is only valid after
120 life_analysis has run. */
121 int current_function_sp_is_unchanging;
122
123 /* Nonzero if the function being compiled is a leaf function which only
124 uses leaf registers. This is valid after reload (specifically after
125 sched2) and is useful only if the port defines LEAF_REGISTERS. */
126 int current_function_uses_only_leaf_regs;
127
128 /* Nonzero once virtual register instantiation has been done.
129 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
130 static int virtuals_instantiated;
131
132 /* These variables hold pointers to functions to
133 save and restore machine-specific data,
134 in push_function_context and pop_function_context. */
135 void (*init_machine_status) PARAMS ((struct function *));
136 void (*save_machine_status) PARAMS ((struct function *));
137 void (*restore_machine_status) PARAMS ((struct function *));
138 void (*mark_machine_status) PARAMS ((struct function *));
139 void (*free_machine_status) PARAMS ((struct function *));
140
141 /* Likewise, but for language-specific data. */
142 void (*init_lang_status) PARAMS ((struct function *));
143 void (*save_lang_status) PARAMS ((struct function *));
144 void (*restore_lang_status) PARAMS ((struct function *));
145 void (*mark_lang_status) PARAMS ((struct function *));
146 void (*free_lang_status) PARAMS ((struct function *));
147
148 /* The FUNCTION_DECL for an inline function currently being expanded. */
149 tree inline_function_decl;
150
151 /* The currently compiled function. */
152 struct function *cfun = 0;
153
154 /* Global list of all compiled functions. */
155 struct function *all_functions = 0;
156
157 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
158 static varray_type prologue;
159 static varray_type epilogue;
160
161 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
162 in this function. */
163 static varray_type sibcall_epilogue;
164 \f
165 /* In order to evaluate some expressions, such as function calls returning
166 structures in memory, we need to temporarily allocate stack locations.
167 We record each allocated temporary in the following structure.
168
169 Associated with each temporary slot is a nesting level. When we pop up
170 one level, all temporaries associated with the previous level are freed.
171 Normally, all temporaries are freed after the execution of the statement
172 in which they were created. However, if we are inside a ({...}) grouping,
173 the result may be in a temporary and hence must be preserved. If the
174 result could be in a temporary, we preserve it if we can determine which
175 one it is in. If we cannot determine which temporary may contain the
176 result, all temporaries are preserved. A temporary is preserved by
177 pretending it was allocated at the previous nesting level.
178
179 Automatic variables are also assigned temporary slots, at the nesting
180    level where they are defined.  They are marked as "kept" so that
181 free_temp_slots will not free them. */
182
183 struct temp_slot
184 {
185 /* Points to next temporary slot. */
186 struct temp_slot *next;
187   /* The rtx used to reference the slot.  */
188 rtx slot;
189 /* The rtx used to represent the address if not the address of the
190 slot above. May be an EXPR_LIST if multiple addresses exist. */
191 rtx address;
192 /* The alignment (in bits) of the slot. */
193 int align;
194 /* The size, in units, of the slot. */
195 HOST_WIDE_INT size;
196 /* The alias set for the slot. If the alias set is zero, we don't
197 know anything about the alias set of the slot. We must only
198 reuse a slot if it is assigned an object of the same alias set.
199 Otherwise, the rest of the compiler may assume that the new use
200 of the slot cannot alias the old use of the slot, which is
201 false. If the slot has alias set zero, then we can't reuse the
202 slot at all, since we have no idea what alias set may have been
203 imposed on the memory. For example, if the stack slot is the
204      call frame for an inlined function, we have no idea what alias
205 sets will be assigned to various pieces of the call frame. */
206 HOST_WIDE_INT alias_set;
207 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
208 tree rtl_expr;
209 /* Non-zero if this temporary is currently in use. */
210 char in_use;
211 /* Non-zero if this temporary has its address taken. */
212 char addr_taken;
213 /* Nesting level at which this slot is being used. */
214 int level;
215 /* Non-zero if this should survive a call to free_temp_slots. */
216 int keep;
217 /* The offset of the slot from the frame_pointer, including extra space
218 for alignment. This info is for combine_temp_slots. */
219 HOST_WIDE_INT base_offset;
220 /* The size of the slot, including extra space for alignment. This
221 info is for combine_temp_slots. */
222 HOST_WIDE_INT full_size;
223 };
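/* [Editorial sketch, not part of the original file.]  The nesting-level
   discipline described above, shown with the entry points defined later
   in this file.  SImode and the 4-byte size are arbitrary illustrative
   values; the RTL emitted between the calls is elided.  */
#if 0
  rtx tmp;

  push_temp_slots ();                        /* enter a new nesting level */
  tmp = assign_stack_temp (SImode, 4, 0);    /* slot belongs to this level */
  /* ... emit RTL that stores a result into TMP ... */
  preserve_temp_slots (tmp);                 /* result must outlive the level:
                                                pretend TMP was allocated one
                                                level up (the ({...}) case) */
  free_temp_slots ();                        /* frees unpreserved, unkept slots
                                                at the current level */
  pop_temp_slots ();                         /* leave the nesting level */
#endif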
224 \f
225 /* This structure is used to record MEMs or pseudos used to replace VAR, any
226 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
227 maintain this list in case two operands of an insn were required to match;
228 in that case we must ensure we use the same replacement. */
229
230 struct fixup_replacement
231 {
232 rtx old;
233 rtx new;
234 struct fixup_replacement *next;
235 };
236
237 struct insns_for_mem_entry {
238 /* The KEY in HE will be a MEM. */
239 struct hash_entry he;
240 /* These are the INSNS which reference the MEM. */
241 rtx insns;
242 };
243
244 /* Forward declarations. */
245
246 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
247 int, struct function *));
248 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
249 HOST_WIDE_INT, int, tree));
250 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
251 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
252 enum machine_mode, enum machine_mode,
253 int, unsigned int, int,
254 struct hash_table *));
255 static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
256 enum machine_mode,
257 struct hash_table *));
258 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
259 struct hash_table *));
260 static struct fixup_replacement
261 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
262 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
263 rtx, int, struct hash_table *));
264 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
265 struct fixup_replacement **));
266 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
267 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
268 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
269 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
270 static void instantiate_decls PARAMS ((tree, int));
271 static void instantiate_decls_1 PARAMS ((tree, int));
272 static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
273 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
274 static void delete_handlers PARAMS ((void));
275 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
276 struct args_size *));
277 #ifndef ARGS_GROW_DOWNWARD
278 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
279 tree));
280 #endif
281 static rtx round_trampoline_addr PARAMS ((rtx));
282 static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
283 static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
284 static tree blocks_nreverse PARAMS ((tree));
285 static int all_blocks PARAMS ((tree, tree *));
286 static tree *get_block_vector PARAMS ((tree, int *));
287 /* We always define `record_insns' even if it's not used so that we
288 can always export `prologue_epilogue_contains'. */
289 static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
290 static int contains PARAMS ((rtx, varray_type));
291 #ifdef HAVE_return
292 static void emit_return_into_block PARAMS ((basic_block, rtx));
293 #endif
294 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
295 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
296 struct hash_table *));
297 static void purge_single_hard_subreg_set PARAMS ((rtx));
298 #ifdef HAVE_epilogue
299 static void keep_stack_depressed PARAMS ((rtx));
300 #endif
301 static int is_addressof PARAMS ((rtx *, void *));
302 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
303 struct hash_table *,
304 hash_table_key));
305 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
306 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
307 static int insns_for_mem_walk PARAMS ((rtx *, void *));
308 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
309 static void mark_temp_slot PARAMS ((struct temp_slot *));
310 static void mark_function_status PARAMS ((struct function *));
311 static void mark_function_chain PARAMS ((void *));
312 static void prepare_function_start PARAMS ((void));
313 static void do_clobber_return_reg PARAMS ((rtx, void *));
314 static void do_use_return_reg PARAMS ((rtx, void *));
315 \f
316 /* Pointer to chain of `struct function' for containing functions. */
317 struct function *outer_function_chain;
318
319 /* Given a function decl for a containing function,
320 return the `struct function' for it. */
321
322 struct function *
323 find_function_data (decl)
324 tree decl;
325 {
326 struct function *p;
327
328 for (p = outer_function_chain; p; p = p->next)
329 if (p->decl == decl)
330 return p;
331
332 abort ();
333 }
334
335 /* Save the current context for compilation of a nested function.
336 This is called from language-specific code. The caller should use
337 the save_lang_status callback to save any language-specific state,
338 since this function knows only about language-independent
339 variables. */
340
341 void
342 push_function_context_to (context)
343 tree context;
344 {
345 struct function *p, *context_data;
346
347 if (context)
348 {
349 context_data = (context == current_function_decl
350 ? cfun
351 : find_function_data (context));
352 context_data->contains_functions = 1;
353 }
354
355 if (cfun == 0)
356 init_dummy_function_start ();
357 p = cfun;
358
359 p->next = outer_function_chain;
360 outer_function_chain = p;
361 p->fixup_var_refs_queue = 0;
362
363 if (save_lang_status)
364 (*save_lang_status) (p);
365 if (save_machine_status)
366 (*save_machine_status) (p);
367
368 cfun = 0;
369 }
370
371 void
372 push_function_context ()
373 {
374 push_function_context_to (current_function_decl);
375 }
376
377 /* Restore the last saved context, at the end of a nested function.
378 This function is called from language-specific code. */
379
380 void
381 pop_function_context_from (context)
382 tree context ATTRIBUTE_UNUSED;
383 {
384 struct function *p = outer_function_chain;
385 struct var_refs_queue *queue;
386 struct var_refs_queue *next;
387
388 cfun = p;
389 outer_function_chain = p->next;
390
391 current_function_decl = p->decl;
392 reg_renumber = 0;
393
394 restore_emit_status (p);
395
396 if (restore_machine_status)
397 (*restore_machine_status) (p);
398 if (restore_lang_status)
399 (*restore_lang_status) (p);
400
401 /* Finish doing put_var_into_stack for any of our variables
402 which became addressable during the nested function. */
403 for (queue = p->fixup_var_refs_queue; queue; queue = next)
404 {
405 next = queue->next;
406 fixup_var_refs (queue->modified, queue->promoted_mode,
407 queue->unsignedp, 0);
408 free (queue);
409 }
410 p->fixup_var_refs_queue = 0;
411
412 /* Reset variables that have known state during rtx generation. */
413 rtx_equal_function_value_matters = 1;
414 virtuals_instantiated = 0;
415 generating_concat_p = 1;
416 }
417
418 void
419 pop_function_context ()
420 {
421 pop_function_context_from (current_function_decl);
422 }
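/* [Editorial sketch, not part of the original file.]  How language-specific
   code brackets the compilation of a nested function with the two calls
   above; the expansion of the nested body itself is elided.  */
#if 0
  push_function_context ();   /* save the containing function's state */
  /* ... start, expand and finish RTL for the nested function ... */
  pop_function_context ();    /* restore the containing function's state */
#endif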
423
424 /* Clear out all parts of the state in F that can safely be discarded
425 after the function has been parsed, but not compiled, to let
426 garbage collection reclaim the memory. */
427
428 void
429 free_after_parsing (f)
430 struct function *f;
431 {
432 /* f->expr->forced_labels is used by code generation. */
433 /* f->emit->regno_reg_rtx is used by code generation. */
434 /* f->varasm is used by code generation. */
435 /* f->eh->eh_return_stub_label is used by code generation. */
436
437 if (free_lang_status)
438 (*free_lang_status) (f);
439 free_stmt_status (f);
440 }
441
442 /* Clear out all parts of the state in F that can safely be discarded
443 after the function has been compiled, to let garbage collection
444 reclaim the memory. */
445
446 void
447 free_after_compilation (f)
448 struct function *f;
449 {
450 struct temp_slot *ts;
451 struct temp_slot *next;
452
453 free_eh_status (f);
454 free_expr_status (f);
455 free_emit_status (f);
456 free_varasm_status (f);
457
458 if (free_machine_status)
459 (*free_machine_status) (f);
460
461 if (f->x_parm_reg_stack_loc)
462 free (f->x_parm_reg_stack_loc);
463
464 for (ts = f->x_temp_slots; ts; ts = next)
465 {
466 next = ts->next;
467 free (ts);
468 }
469 f->x_temp_slots = NULL;
470
471 f->arg_offset_rtx = NULL;
472 f->return_rtx = NULL;
473 f->internal_arg_pointer = NULL;
474 f->x_nonlocal_labels = NULL;
475 f->x_nonlocal_goto_handler_slots = NULL;
476 f->x_nonlocal_goto_handler_labels = NULL;
477 f->x_nonlocal_goto_stack_level = NULL;
478 f->x_cleanup_label = NULL;
479 f->x_return_label = NULL;
480 f->x_save_expr_regs = NULL;
481 f->x_stack_slot_list = NULL;
482 f->x_rtl_expr_chain = NULL;
483 f->x_tail_recursion_label = NULL;
484 f->x_tail_recursion_reentry = NULL;
485 f->x_arg_pointer_save_area = NULL;
486 f->x_clobber_return_insn = NULL;
487 f->x_context_display = NULL;
488 f->x_trampoline_list = NULL;
489 f->x_parm_birth_insn = NULL;
490 f->x_last_parm_insn = NULL;
491 f->x_parm_reg_stack_loc = NULL;
492 f->fixup_var_refs_queue = NULL;
493 f->original_arg_vector = NULL;
494 f->original_decl_initial = NULL;
495 f->inl_last_parm_insn = NULL;
496 f->epilogue_delay_list = NULL;
497 }
498 \f
499 /* Allocate fixed slots in the stack frame of the current function. */
500
501 /* Return size needed for stack frame based on slots so far allocated in
502 function F.
503 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
504 the caller may have to do that. */
505
506 HOST_WIDE_INT
507 get_func_frame_size (f)
508 struct function *f;
509 {
510 #ifdef FRAME_GROWS_DOWNWARD
511 return -f->x_frame_offset;
512 #else
513 return f->x_frame_offset;
514 #endif
515 }
516
517 /* Return size needed for stack frame based on slots so far allocated.
518 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
519 the caller may have to do that. */
520 HOST_WIDE_INT
521 get_frame_size ()
522 {
523 return get_func_frame_size (cfun);
524 }
525
526 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
527 with machine mode MODE.
528
529 ALIGN controls the amount of alignment for the address of the slot:
530 0 means according to MODE,
531 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
532 positive specifies alignment boundary in bits.
533
534 We do not round to stack_boundary here.
535
536 FUNCTION specifies the function to allocate in. */
537
538 static rtx
539 assign_stack_local_1 (mode, size, align, function)
540 enum machine_mode mode;
541 HOST_WIDE_INT size;
542 int align;
543 struct function *function;
544 {
545 register rtx x, addr;
546 int bigend_correction = 0;
547 int alignment;
548
549 if (align == 0)
550 {
551 tree type;
552
553 if (mode == BLKmode)
554 alignment = BIGGEST_ALIGNMENT;
555 else
556 alignment = GET_MODE_ALIGNMENT (mode);
557
558 /* Allow the target to (possibly) increase the alignment of this
559 stack slot. */
560 type = type_for_mode (mode, 0);
561 if (type)
562 alignment = LOCAL_ALIGNMENT (type, alignment);
563
564 alignment /= BITS_PER_UNIT;
565 }
566 else if (align == -1)
567 {
568 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
569 size = CEIL_ROUND (size, alignment);
570 }
571 else
572 alignment = align / BITS_PER_UNIT;
573
574 #ifdef FRAME_GROWS_DOWNWARD
575 function->x_frame_offset -= size;
576 #endif
577
578   /* Ignore alignment requests we cannot satisfy given PREFERRED_STACK_BOUNDARY.  */
579 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
580 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
581
582 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
583 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
584
585 /* Round frame offset to that alignment.
586 We must be careful here, since FRAME_OFFSET might be negative and
587 division with a negative dividend isn't as well defined as we might
588 like. So we instead assume that ALIGNMENT is a power of two and
589 use logical operations which are unambiguous. */
590 #ifdef FRAME_GROWS_DOWNWARD
591 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
592 #else
593 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
594 #endif
595
596 /* On a big-endian machine, if we are allocating more space than we will use,
597 use the least significant bytes of those that are allocated. */
598 if (BYTES_BIG_ENDIAN && mode != BLKmode)
599 bigend_correction = size - GET_MODE_SIZE (mode);
600
601 /* If we have already instantiated virtual registers, return the actual
602 address relative to the frame pointer. */
603 if (function == cfun && virtuals_instantiated)
604 addr = plus_constant (frame_pointer_rtx,
605 (frame_offset + bigend_correction
606 + STARTING_FRAME_OFFSET));
607 else
608 addr = plus_constant (virtual_stack_vars_rtx,
609 function->x_frame_offset + bigend_correction);
610
611 #ifndef FRAME_GROWS_DOWNWARD
612 function->x_frame_offset += size;
613 #endif
614
615 x = gen_rtx_MEM (mode, addr);
616
617 function->x_stack_slot_list
618 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
619
620 return x;
621 }
622
623 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
624 current function. */
625
626 rtx
627 assign_stack_local (mode, size, align)
628 enum machine_mode mode;
629 HOST_WIDE_INT size;
630 int align;
631 {
632 return assign_stack_local_1 (mode, size, align, cfun);
633 }
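/* [Editorial sketch, not part of the original file.]  The three ALIGN
   conventions documented before assign_stack_local_1, with illustrative
   mode/size values.  */
#if 0
  rtx a = assign_stack_local (DImode, 8, 0);     /* align according to DImode */
  rtx b = assign_stack_local (BLKmode, 24, -1);  /* use BIGGEST_ALIGNMENT and
                                                    round the size up to it  */
  rtx c = assign_stack_local (BLKmode, 24, 64);  /* explicit 64-bit boundary */
#endif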
634 \f
635 /* Allocate a temporary stack slot and record it for possible later
636 reuse.
637
638 MODE is the machine mode to be given to the returned rtx.
639
640 SIZE is the size in units of the space required. We do no rounding here
641 since assign_stack_local will do any required rounding.
642
643 KEEP is 1 if this slot is to be retained after a call to
644 free_temp_slots. Automatic variables for a block are allocated
645 with this flag. KEEP is 2 if we allocate a longer term temporary,
646 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
647 if we are to allocate something at an inner level to be treated as
648 a variable in the block (e.g., a SAVE_EXPR).
649
650 TYPE is the type that will be used for the stack slot. */
651
652 static rtx
653 assign_stack_temp_for_type (mode, size, keep, type)
654 enum machine_mode mode;
655 HOST_WIDE_INT size;
656 int keep;
657 tree type;
658 {
659 int align;
660 HOST_WIDE_INT alias_set;
661 struct temp_slot *p, *best_p = 0;
662
663 /* If SIZE is -1 it means that somebody tried to allocate a temporary
664 of a variable size. */
665 if (size == -1)
666 abort ();
667
668 /* If we know the alias set for the memory that will be used, use
669 it. If there's no TYPE, then we don't know anything about the
670 alias set for the memory. */
671 if (type)
672 alias_set = get_alias_set (type);
673 else
674 alias_set = 0;
675
676 if (mode == BLKmode)
677 align = BIGGEST_ALIGNMENT;
678 else
679 align = GET_MODE_ALIGNMENT (mode);
680
681 if (! type)
682 type = type_for_mode (mode, 0);
683
684 if (type)
685 align = LOCAL_ALIGNMENT (type, align);
686
687 /* Try to find an available, already-allocated temporary of the proper
688 mode which meets the size and alignment requirements. Choose the
689 smallest one with the closest alignment. */
690 for (p = temp_slots; p; p = p->next)
691 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
692 && ! p->in_use
693 && (! flag_strict_aliasing
694 || (alias_set && p->alias_set == alias_set))
695 && (best_p == 0 || best_p->size > p->size
696 || (best_p->size == p->size && best_p->align > p->align)))
697 {
698 if (p->align == align && p->size == size)
699 {
700 best_p = 0;
701 break;
702 }
703 best_p = p;
704 }
705
706 /* Make our best, if any, the one to use. */
707 if (best_p)
708 {
709 /* If there are enough aligned bytes left over, make them into a new
710 temp_slot so that the extra bytes don't get wasted. Do this only
711 for BLKmode slots, so that we can be sure of the alignment. */
712 if (GET_MODE (best_p->slot) == BLKmode)
713 {
714 int alignment = best_p->align / BITS_PER_UNIT;
715 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
716
717 if (best_p->size - rounded_size >= alignment)
718 {
719 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
720 p->in_use = p->addr_taken = 0;
721 p->size = best_p->size - rounded_size;
722 p->base_offset = best_p->base_offset + rounded_size;
723 p->full_size = best_p->full_size - rounded_size;
724 p->slot = gen_rtx_MEM (BLKmode,
725 plus_constant (XEXP (best_p->slot, 0),
726 rounded_size));
727 p->align = best_p->align;
728 p->address = 0;
729 p->rtl_expr = 0;
730 p->alias_set = best_p->alias_set;
731 p->next = temp_slots;
732 temp_slots = p;
733
734 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
735 stack_slot_list);
736
737 best_p->size = rounded_size;
738 best_p->full_size = rounded_size;
739 }
740 }
741
742 p = best_p;
743 }
744
745 /* If we still didn't find one, make a new temporary. */
746 if (p == 0)
747 {
748 HOST_WIDE_INT frame_offset_old = frame_offset;
749
750 p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
751
752 /* We are passing an explicit alignment request to assign_stack_local.
753 One side effect of that is assign_stack_local will not round SIZE
754 to ensure the frame offset remains suitably aligned.
755
756 So for requests which depended on the rounding of SIZE, we go ahead
757 and round it now. We also make sure ALIGNMENT is at least
758 BIGGEST_ALIGNMENT. */
759 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
760 	abort ();
761 p->slot = assign_stack_local (mode,
762 (mode == BLKmode
763 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
764 : size),
765 align);
766
767 p->align = align;
768 p->alias_set = alias_set;
769
770 /* The following slot size computation is necessary because we don't
771 know the actual size of the temporary slot until assign_stack_local
772 has performed all the frame alignment and size rounding for the
773 requested temporary. Note that extra space added for alignment
774 can be either above or below this stack slot depending on which
775 way the frame grows. We include the extra space if and only if it
776 is above this slot. */
777 #ifdef FRAME_GROWS_DOWNWARD
778 p->size = frame_offset_old - frame_offset;
779 #else
780 p->size = size;
781 #endif
782
783 /* Now define the fields used by combine_temp_slots. */
784 #ifdef FRAME_GROWS_DOWNWARD
785 p->base_offset = frame_offset;
786 p->full_size = frame_offset_old - frame_offset;
787 #else
788 p->base_offset = frame_offset_old;
789 p->full_size = frame_offset - frame_offset_old;
790 #endif
791 p->address = 0;
792 p->next = temp_slots;
793 temp_slots = p;
794 }
795
796 p->in_use = 1;
797 p->addr_taken = 0;
798 p->rtl_expr = seq_rtl_expr;
799
800 if (keep == 2)
801 {
802 p->level = target_temp_slot_level;
803 p->keep = 0;
804 }
805 else if (keep == 3)
806 {
807 p->level = var_temp_slot_level;
808 p->keep = 0;
809 }
810 else
811 {
812 p->level = temp_slot_level;
813 p->keep = keep;
814 }
815
816 /* We may be reusing an old slot, so clear any MEM flags that may have been
817 set from before. */
818 RTX_UNCHANGING_P (p->slot) = 0;
819 MEM_IN_STRUCT_P (p->slot) = 0;
820 MEM_SCALAR_P (p->slot) = 0;
821 MEM_ALIAS_SET (p->slot) = alias_set;
822
823 if (type != 0)
824 MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
825
826 return p->slot;
827 }
828
829 /* Allocate a temporary stack slot and record it for possible later
830 reuse. First three arguments are same as in preceding function. */
831
832 rtx
833 assign_stack_temp (mode, size, keep)
834 enum machine_mode mode;
835 HOST_WIDE_INT size;
836 int keep;
837 {
838 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
839 }
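/* [Editorial sketch, not part of the original file.]  Illustrative KEEP
   values, per the comment before assign_stack_temp_for_type above.  */
#if 0
  rtx t0 = assign_stack_temp (SImode, 4, 0);   /* freed by free_temp_slots */
  rtx t1 = assign_stack_temp (SImode, 4, 1);   /* retained after free_temp_slots
                                                  (block automatics) */
  rtx t2 = assign_stack_temp (BLKmode, 16, 2); /* lifetime controlled by
                                                  CLEANUP_POINT_EXPRs */
  rtx t3 = assign_stack_temp (BLKmode, 16, 3); /* treated as a variable of an
                                                  inner block (e.g. SAVE_EXPR) */
#endif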
840 \f
841 /* Assign a temporary of given TYPE.
842 KEEP is as for assign_stack_temp.
843 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
844 it is 0 if a register is OK.
845 DONT_PROMOTE is 1 if we should not promote values in register
846 to wider modes. */
847
848 rtx
849 assign_temp (type, keep, memory_required, dont_promote)
850 tree type;
851 int keep;
852 int memory_required;
853 int dont_promote ATTRIBUTE_UNUSED;
854 {
855 enum machine_mode mode = TYPE_MODE (type);
856 #ifndef PROMOTE_FOR_CALL_ONLY
857 int unsignedp = TREE_UNSIGNED (type);
858 #endif
859
860 if (mode == BLKmode || memory_required)
861 {
862 HOST_WIDE_INT size = int_size_in_bytes (type);
863 rtx tmp;
864
865      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
866 problems with allocating the stack space. */
867 if (size == 0)
868 size = 1;
869
870 /* Unfortunately, we don't yet know how to allocate variable-sized
871 temporaries. However, sometimes we have a fixed upper limit on
872 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
873 instead. This is the case for Chill variable-sized strings. */
874 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
875 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
876 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
877 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
878
879 tmp = assign_stack_temp_for_type (mode, size, keep, type);
880 return tmp;
881 }
882
883 #ifndef PROMOTE_FOR_CALL_ONLY
884 if (! dont_promote)
885 mode = promote_mode (type, mode, &unsignedp, 0);
886 #endif
887
888 return gen_reg_rtx (mode);
889 }
890 \f
891 /* Combine temporary stack slots which are adjacent on the stack.
892
893 This allows for better use of already allocated stack space. This is only
894 done for BLKmode slots because we can be sure that we won't have alignment
895 problems in this case. */
896
897 void
898 combine_temp_slots ()
899 {
900 struct temp_slot *p, *q;
901 struct temp_slot *prev_p, *prev_q;
902 int num_slots;
903
904 /* We can't combine slots, because the information about which slot
905 is in which alias set will be lost. */
906 if (flag_strict_aliasing)
907 return;
908
909   /* If there are a lot of temp slots, don't do anything unless
910      high levels of optimization are enabled.  */
911 if (! flag_expensive_optimizations)
912 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
913 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
914 return;
915
916 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
917 {
918 int delete_p = 0;
919
920 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
921 for (q = p->next, prev_q = p; q; q = prev_q->next)
922 {
923 int delete_q = 0;
924 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
925 {
926 if (p->base_offset + p->full_size == q->base_offset)
927 {
928 /* Q comes after P; combine Q into P. */
929 p->size += q->size;
930 p->full_size += q->full_size;
931 delete_q = 1;
932 }
933 else if (q->base_offset + q->full_size == p->base_offset)
934 {
935 /* P comes after Q; combine P into Q. */
936 q->size += p->size;
937 q->full_size += p->full_size;
938 delete_p = 1;
939 break;
940 }
941 }
942 /* Either delete Q or advance past it. */
943 if (delete_q)
944 {
945 prev_q->next = q->next;
946 free (q);
947 }
948 else
949 prev_q = q;
950 }
951 /* Either delete P or advance past it. */
952 if (delete_p)
953 {
954 if (prev_p)
955 prev_p->next = p->next;
956 else
957 temp_slots = p->next;
958 }
959 else
960 prev_p = p;
961 }
962 }
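/* [Editorial worked example.]  If a free BLKmode slot P has base_offset 0
   and full_size 16, and another free BLKmode slot Q has base_offset 16,
   then p->base_offset + p->full_size == q->base_offset, so Q is merged
   into P and P's size and full_size grow by Q's.  (Illustrative numbers
   only.)  */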
963 \f
964 /* Find the temp slot corresponding to the object at address X. */
965
966 static struct temp_slot *
967 find_temp_slot_from_address (x)
968 rtx x;
969 {
970 struct temp_slot *p;
971 rtx next;
972
973 for (p = temp_slots; p; p = p->next)
974 {
975 if (! p->in_use)
976 continue;
977
978 else if (XEXP (p->slot, 0) == x
979 || p->address == x
980 || (GET_CODE (x) == PLUS
981 && XEXP (x, 0) == virtual_stack_vars_rtx
982 && GET_CODE (XEXP (x, 1)) == CONST_INT
983 && INTVAL (XEXP (x, 1)) >= p->base_offset
984 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
985 return p;
986
987 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
988 for (next = p->address; next; next = XEXP (next, 1))
989 if (XEXP (next, 0) == x)
990 return p;
991 }
992
993 /* If we have a sum involving a register, see if it points to a temp
994 slot. */
995 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
996 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
997 return p;
998 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
999 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1000 return p;
1001
1002 return 0;
1003 }
1004
1005 /* Indicate that NEW is an alternate way of referring to the temp slot
1006 that previously was known by OLD. */
1007
1008 void
1009 update_temp_slot_address (old, new)
1010 rtx old, new;
1011 {
1012 struct temp_slot *p;
1013
1014 if (rtx_equal_p (old, new))
1015 return;
1016
1017 p = find_temp_slot_from_address (old);
1018
1019   /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
1020      register, see if one operand of the PLUS is a temporary location; if
1021      so, NEW points into it.  Otherwise, if both OLD and NEW are a PLUS
1022      and there is a register in common between them, try a recursive call
1023      on those values.  */
1024 if (p == 0)
1025 {
1026 if (GET_CODE (old) != PLUS)
1027 return;
1028
1029 if (GET_CODE (new) == REG)
1030 {
1031 update_temp_slot_address (XEXP (old, 0), new);
1032 update_temp_slot_address (XEXP (old, 1), new);
1033 return;
1034 }
1035 else if (GET_CODE (new) != PLUS)
1036 return;
1037
1038 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1039 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1040 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1041 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1042 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1043 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1044 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1045 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1046
1047 return;
1048 }
1049
1050 /* Otherwise add an alias for the temp's address. */
1051 else if (p->address == 0)
1052 p->address = new;
1053 else
1054 {
1055 if (GET_CODE (p->address) != EXPR_LIST)
1056 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1057
1058 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1059 }
1060 }
1061
1062 /* If X could be a reference to a temporary slot, mark the fact that its
1063 address was taken. */
1064
1065 void
1066 mark_temp_addr_taken (x)
1067 rtx x;
1068 {
1069 struct temp_slot *p;
1070
1071 if (x == 0)
1072 return;
1073
1074 /* If X is not in memory or is at a constant address, it cannot be in
1075 a temporary slot. */
1076 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1077 return;
1078
1079 p = find_temp_slot_from_address (XEXP (x, 0));
1080 if (p != 0)
1081 p->addr_taken = 1;
1082 }
1083
1084 /* If X could be a reference to a temporary slot, mark that slot as
1085    belonging to the level one higher than the current level.  If X
1086 matched one of our slots, just mark that one. Otherwise, we can't
1087 easily predict which it is, so upgrade all of them. Kept slots
1088 need not be touched.
1089
1090 This is called when an ({...}) construct occurs and a statement
1091 returns a value in memory. */
1092
1093 void
1094 preserve_temp_slots (x)
1095 rtx x;
1096 {
1097 struct temp_slot *p = 0;
1098
1099   /* If there is no result, we still might have some objects whose addresses
1100      were taken, so we need to make sure they stay around.  */
1101 if (x == 0)
1102 {
1103 for (p = temp_slots; p; p = p->next)
1104 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1105 p->level--;
1106
1107 return;
1108 }
1109
1110 /* If X is a register that is being used as a pointer, see if we have
1111 a temporary slot we know it points to. To be consistent with
1112 the code below, we really should preserve all non-kept slots
1113 if we can't find a match, but that seems to be much too costly. */
1114 if (GET_CODE (x) == REG && REG_POINTER (x))
1115 p = find_temp_slot_from_address (x);
1116
1117 /* If X is not in memory or is at a constant address, it cannot be in
1118 a temporary slot, but it can contain something whose address was
1119 taken. */
1120 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1121 {
1122 for (p = temp_slots; p; p = p->next)
1123 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1124 p->level--;
1125
1126 return;
1127 }
1128
1129 /* First see if we can find a match. */
1130 if (p == 0)
1131 p = find_temp_slot_from_address (XEXP (x, 0));
1132
1133 if (p != 0)
1134 {
1135 /* Move everything at our level whose address was taken to our new
1136 level in case we used its address. */
1137 struct temp_slot *q;
1138
1139 if (p->level == temp_slot_level)
1140 {
1141 for (q = temp_slots; q; q = q->next)
1142 if (q != p && q->addr_taken && q->level == p->level)
1143 q->level--;
1144
1145 p->level--;
1146 p->addr_taken = 0;
1147 }
1148 return;
1149 }
1150
1151 /* Otherwise, preserve all non-kept slots at this level. */
1152 for (p = temp_slots; p; p = p->next)
1153 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1154 p->level--;
1155 }
1156
1157 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1158 with that RTL_EXPR, promote it into a temporary slot at the present
1159 level so it will not be freed when we free slots made in the
1160 RTL_EXPR. */
1161
1162 void
1163 preserve_rtl_expr_result (x)
1164 rtx x;
1165 {
1166 struct temp_slot *p;
1167
1168 /* If X is not in memory or is at a constant address, it cannot be in
1169 a temporary slot. */
1170 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1171 return;
1172
1173 /* If we can find a match, move it to our level unless it is already at
1174 an upper level. */
1175 p = find_temp_slot_from_address (XEXP (x, 0));
1176 if (p != 0)
1177 {
1178 p->level = MIN (p->level, temp_slot_level);
1179 p->rtl_expr = 0;
1180 }
1181
1182 return;
1183 }
1184
1185 /* Free all temporaries used so far. This is normally called at the end
1186 of generating code for a statement. Don't free any temporaries
1187 currently in use for an RTL_EXPR that hasn't yet been emitted.
1188 We could eventually do better than this since it can be reused while
1189 generating the same RTL_EXPR, but this is complex and probably not
1190 worthwhile. */
1191
1192 void
1193 free_temp_slots ()
1194 {
1195 struct temp_slot *p;
1196
1197 for (p = temp_slots; p; p = p->next)
1198 if (p->in_use && p->level == temp_slot_level && ! p->keep
1199 && p->rtl_expr == 0)
1200 p->in_use = 0;
1201
1202 combine_temp_slots ();
1203 }
1204
1205 /* Free all temporary slots used in T, an RTL_EXPR node. */
1206
1207 void
1208 free_temps_for_rtl_expr (t)
1209 tree t;
1210 {
1211 struct temp_slot *p;
1212
1213 for (p = temp_slots; p; p = p->next)
1214 if (p->rtl_expr == t)
1215 {
1216 /* If this slot is below the current TEMP_SLOT_LEVEL, then it
1217 needs to be preserved. This can happen if a temporary in
1218 the RTL_EXPR was addressed; preserve_temp_slots will move
1219 the temporary into a higher level. */
1220 if (temp_slot_level <= p->level)
1221 p->in_use = 0;
1222 else
1223 p->rtl_expr = NULL_TREE;
1224 }
1225
1226 combine_temp_slots ();
1227 }
1228
1229 /* Mark all temporaries ever allocated in this function as not suitable
1230 for reuse until the current level is exited. */
1231
1232 void
1233 mark_all_temps_used ()
1234 {
1235 struct temp_slot *p;
1236
1237 for (p = temp_slots; p; p = p->next)
1238 {
1239 p->in_use = p->keep = 1;
1240 p->level = MIN (p->level, temp_slot_level);
1241 }
1242 }
1243
1244 /* Push deeper into the nesting level for stack temporaries. */
1245
1246 void
1247 push_temp_slots ()
1248 {
1249 temp_slot_level++;
1250 }
1251
1252 /* Likewise, but save the new level as the place to allocate variables
1253 for blocks. */
1254
1255 #if 0
1256 void
1257 push_temp_slots_for_block ()
1258 {
1259 push_temp_slots ();
1260
1261 var_temp_slot_level = temp_slot_level;
1262 }
1263
1264 /* Likewise, but save the new level as the place to allocate temporaries
1265 for TARGET_EXPRs. */
1266
1267 void
1268 push_temp_slots_for_target ()
1269 {
1270 push_temp_slots ();
1271
1272 target_temp_slot_level = temp_slot_level;
1273 }
1274
1275 /* Set and get the value of target_temp_slot_level. The only
1276 permitted use of these functions is to save and restore this value. */
1277
1278 int
1279 get_target_temp_slot_level ()
1280 {
1281 return target_temp_slot_level;
1282 }
1283
1284 void
1285 set_target_temp_slot_level (level)
1286 int level;
1287 {
1288 target_temp_slot_level = level;
1289 }
1290 #endif
1291
1292 /* Pop a temporary nesting level. All slots in use in the current level
1293 are freed. */
1294
1295 void
1296 pop_temp_slots ()
1297 {
1298 struct temp_slot *p;
1299
1300 for (p = temp_slots; p; p = p->next)
1301 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1302 p->in_use = 0;
1303
1304 combine_temp_slots ();
1305
1306 temp_slot_level--;
1307 }
1308
1309 /* Initialize temporary slots. */
1310
1311 void
1312 init_temp_slots ()
1313 {
1314 /* We have not allocated any temporaries yet. */
1315 temp_slots = 0;
1316 temp_slot_level = 0;
1317 var_temp_slot_level = 0;
1318 target_temp_slot_level = 0;
1319 }
1320 \f
1321 /* Retroactively move an auto variable from a register to a stack slot.
1322 This is done when an address-reference to the variable is seen. */
1323
1324 void
1325 put_var_into_stack (decl)
1326 tree decl;
1327 {
1328 register rtx reg;
1329 enum machine_mode promoted_mode, decl_mode;
1330 struct function *function = 0;
1331 tree context;
1332 int can_use_addressof;
1333 int volatilep = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
1334 int usedp = (TREE_USED (decl)
1335 || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));
1336
1337 context = decl_function_context (decl);
1338
1339 /* Get the current rtl used for this object and its original mode. */
1340 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1341
1342 /* No need to do anything if decl has no rtx yet
1343 since in that case caller is setting TREE_ADDRESSABLE
1344 and a stack slot will be assigned when the rtl is made. */
1345 if (reg == 0)
1346 return;
1347
1348 /* Get the declared mode for this object. */
1349 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1350 : DECL_MODE (decl));
1351 /* Get the mode it's actually stored in. */
1352 promoted_mode = GET_MODE (reg);
1353
1354 /* If this variable comes from an outer function,
1355 find that function's saved context. */
1356 if (context != current_function_decl && context != inline_function_decl)
1357 for (function = outer_function_chain; function; function = function->next)
1358 if (function->decl == context)
1359 break;
1360
1361 /* If this is a variable-size object with a pseudo to address it,
1362 put that pseudo into the stack, if the var is nonlocal. */
1363 if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
1364 && GET_CODE (reg) == MEM
1365 && GET_CODE (XEXP (reg, 0)) == REG
1366 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1367 {
1368 reg = XEXP (reg, 0);
1369 decl_mode = promoted_mode = GET_MODE (reg);
1370 }
1371
1372 can_use_addressof
1373 = (function == 0
1374 && optimize > 0
1375 /* FIXME make it work for promoted modes too */
1376 && decl_mode == promoted_mode
1377 #ifdef NON_SAVING_SETJMP
1378 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1379 #endif
1380 );
1381
1382 /* If we can't use ADDRESSOF, make sure we see through one we already
1383 generated. */
1384 if (! can_use_addressof && GET_CODE (reg) == MEM
1385 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1386 reg = XEXP (XEXP (reg, 0), 0);
1387
1388 /* Now we should have a value that resides in one or more pseudo regs. */
1389
1390 if (GET_CODE (reg) == REG)
1391 {
1392 /* If this variable lives in the current function and we don't need
1393 to put things in the stack for the sake of setjmp, try to keep it
1394 in a register until we know we actually need the address. */
1395 if (can_use_addressof)
1396 gen_mem_addressof (reg, decl);
1397 else
1398 put_reg_into_stack (function, reg, TREE_TYPE (decl), promoted_mode,
1399 decl_mode, volatilep, 0, usedp, 0);
1400 }
1401 else if (GET_CODE (reg) == CONCAT)
1402 {
1403 /* A CONCAT contains two pseudos; put them both in the stack.
1404 We do it so they end up consecutive.
1405 We fixup references to the parts only after we fixup references
1406 to the whole CONCAT, lest we do double fixups for the latter
1407 references. */
1408 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1409 tree part_type = type_for_mode (part_mode, 0);
1410 rtx lopart = XEXP (reg, 0);
1411 rtx hipart = XEXP (reg, 1);
1412 #ifdef FRAME_GROWS_DOWNWARD
1413 /* Since part 0 should have a lower address, do it second. */
1414 put_reg_into_stack (function, hipart, part_type, part_mode,
1415 part_mode, volatilep, 0, 0, 0);
1416 put_reg_into_stack (function, lopart, part_type, part_mode,
1417 part_mode, volatilep, 0, 0, 0);
1418 #else
1419 put_reg_into_stack (function, lopart, part_type, part_mode,
1420 part_mode, volatilep, 0, 0, 0);
1421 put_reg_into_stack (function, hipart, part_type, part_mode,
1422 part_mode, volatilep, 0, 0, 0);
1423 #endif
1424
1425 /* Change the CONCAT into a combined MEM for both parts. */
1426 PUT_CODE (reg, MEM);
1427 set_mem_attributes (reg, decl, 1);
1428
1429 /* The two parts are in memory order already.
1430 	 Use the lower part's address as ours.  */
1431 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1432 /* Prevent sharing of rtl that might lose. */
1433 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1434 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1435 if (usedp)
1436 {
1437 schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
1438 promoted_mode, 0);
1439 schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
1440 schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
1441 }
1442 }
1443 else
1444 return;
1445
1446 if (current_function_check_memory_usage)
1447 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK, VOIDmode,
1448 3, XEXP (reg, 0), Pmode,
1449 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1450 TYPE_MODE (sizetype),
1451 GEN_INT (MEMORY_USE_RW),
1452 TYPE_MODE (integer_type_node));
1453 }
1454
1455 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1456 into the stack frame of FUNCTION (0 means the current function).
1457 DECL_MODE is the machine mode of the user-level data type.
1458 PROMOTED_MODE is the machine mode of the register.
1459 VOLATILE_P is nonzero if this is for a "volatile" decl.
1460 USED_P is nonzero if this reg might have already been used in an insn. */
1461
1462 static void
1463 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1464 original_regno, used_p, ht)
1465 struct function *function;
1466 rtx reg;
1467 tree type;
1468 enum machine_mode promoted_mode, decl_mode;
1469 int volatile_p;
1470 unsigned int original_regno;
1471 int used_p;
1472 struct hash_table *ht;
1473 {
1474 struct function *func = function ? function : cfun;
1475 rtx new = 0;
1476 unsigned int regno = original_regno;
1477
1478 if (regno == 0)
1479 regno = REGNO (reg);
1480
1481 if (regno < func->x_max_parm_reg)
1482 new = func->x_parm_reg_stack_loc[regno];
1483
1484 if (new == 0)
1485 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1486
1487 PUT_CODE (reg, MEM);
1488 PUT_MODE (reg, decl_mode);
1489 XEXP (reg, 0) = XEXP (new, 0);
1490 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1491 MEM_VOLATILE_P (reg) = volatile_p;
1492
1493 /* If this is a memory ref that contains aggregate components,
1494 mark it as such for cse and loop optimize. If we are reusing a
1495 previously generated stack slot, then we need to copy the bit in
1496 case it was set for other reasons. For instance, it is set for
1497 __builtin_va_alist. */
1498 if (type)
1499 {
1500 MEM_SET_IN_STRUCT_P (reg,
1501 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1502 MEM_ALIAS_SET (reg) = get_alias_set (type);
1503 }
1504 if (used_p)
1505 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
1506 }
1507
1508 /* Make sure that all refs to the variable, previously made
1509 when it was a register, are fixed up to be valid again.
1510 See function above for meaning of arguments. */
1511 static void
1512 schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1513 struct function *function;
1514 rtx reg;
1515 tree type;
1516 enum machine_mode promoted_mode;
1517 struct hash_table *ht;
1518 {
1519 int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1520
1521 if (function != 0)
1522 {
1523 struct var_refs_queue *temp;
1524
1525 temp
1526 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1527 temp->modified = reg;
1528 temp->promoted_mode = promoted_mode;
1529 temp->unsignedp = unsigned_p;
1530 temp->next = function->fixup_var_refs_queue;
1531 function->fixup_var_refs_queue = temp;
1532 }
1533 else
1534 /* Variable is local; fix it up now. */
1535 fixup_var_refs (reg, promoted_mode, unsigned_p, ht);
1536 }
1537 \f
1538 static void
1539 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1540 rtx var;
1541 enum machine_mode promoted_mode;
1542 int unsignedp;
1543 struct hash_table *ht;
1544 {
1545 tree pending;
1546 rtx first_insn = get_insns ();
1547 struct sequence_stack *stack = seq_stack;
1548 tree rtl_exps = rtl_expr_chain;
1549 rtx insn;
1550
1551 /* Must scan all insns for stack-refs that exceed the limit. */
1552 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1553 stack == 0, ht);
1554 /* If there's a hash table, it must record all uses of VAR. */
1555 if (ht)
1556 return;
1557
1558 /* Scan all pending sequences too. */
1559 for (; stack; stack = stack->next)
1560 {
1561 push_to_sequence (stack->first);
1562 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1563 stack->first, stack->next != 0, 0);
1564 /* Update remembered end of sequence
1565 in case we added an insn at the end. */
1566 stack->last = get_last_insn ();
1567 end_sequence ();
1568 }
1569
1570 /* Scan all waiting RTL_EXPRs too. */
1571 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1572 {
1573 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1574 if (seq != const0_rtx && seq != 0)
1575 {
1576 push_to_sequence (seq);
1577 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0, 0);
1578 end_sequence ();
1579 }
1580 }
1581
1582 /* Scan the catch clauses for exception handling too. */
1583 push_to_full_sequence (catch_clauses, catch_clauses_last);
1584 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses, 0, 0);
1585 end_full_sequence (&catch_clauses, &catch_clauses_last);
1586
1587 /* Scan sequences saved in CALL_PLACEHOLDERS too. */
1588 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1589 {
1590 if (GET_CODE (insn) == CALL_INSN
1591 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1592 {
1593 int i;
1594
1595 /* Look at the Normal call, sibling call and tail recursion
1596 sequences attached to the CALL_PLACEHOLDER. */
1597 for (i = 0; i < 3; i++)
1598 {
1599 rtx seq = XEXP (PATTERN (insn), i);
1600 if (seq)
1601 {
1602 push_to_sequence (seq);
1603 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1604 seq, 0, 0);
1605 XEXP (PATTERN (insn), i) = get_insns ();
1606 end_sequence ();
1607 }
1608 }
1609 }
1610 }
1611 }
1612 \f
1613 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1614 some part of an insn. Return a struct fixup_replacement whose OLD
1615 value is equal to X. Allocate a new structure if no such entry exists. */
1616
1617 static struct fixup_replacement *
1618 find_fixup_replacement (replacements, x)
1619 struct fixup_replacement **replacements;
1620 rtx x;
1621 {
1622 struct fixup_replacement *p;
1623
1624 /* See if we have already replaced this. */
1625 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1626 ;
1627
1628 if (p == 0)
1629 {
1630 p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1631 p->old = x;
1632 p->new = 0;
1633 p->next = *replacements;
1634 *replacements = p;
1635 }
1636
1637 return p;
1638 }
1639
1640 /* Scan the insn-chain starting with INSN for refs to VAR
1641 and fix them up. TOPLEVEL is nonzero if this chain is the
1642 main chain of insns for the current function. */
1643
1644 static void
1645 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1646 rtx var;
1647 enum machine_mode promoted_mode;
1648 int unsignedp;
1649 rtx insn;
1650 int toplevel;
1651 struct hash_table *ht;
1652 {
1653 rtx call_dest = 0;
1654 rtx insn_list = NULL_RTX;
1655
1656 /* If we already know which INSNs reference VAR there's no need
1657 to walk the entire instruction chain. */
1658 if (ht)
1659 {
1660 insn_list = ((struct insns_for_mem_entry *)
1661 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1662 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1663 insn_list = XEXP (insn_list, 1);
1664 }
1665
1666 while (insn)
1667 {
1668 rtx next = NEXT_INSN (insn);
1669 rtx set, prev, prev_set;
1670 rtx note;
1671
1672 if (INSN_P (insn))
1673 {
1674 /* Remember the notes in case we delete the insn. */
1675 note = REG_NOTES (insn);
1676
1677 /* If this is a CLOBBER of VAR, delete it.
1678
1679 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1680 and REG_RETVAL notes too. */
1681 if (GET_CODE (PATTERN (insn)) == CLOBBER
1682 && (XEXP (PATTERN (insn), 0) == var
1683 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1684 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1685 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1686 {
1687 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1688 /* The REG_LIBCALL note will go away since we are going to
1689 turn INSN into a NOTE, so just delete the
1690 corresponding REG_RETVAL note. */
1691 remove_note (XEXP (note, 0),
1692 find_reg_note (XEXP (note, 0), REG_RETVAL,
1693 NULL_RTX));
1694
1695 /* In unoptimized compilation, we shouldn't call delete_insn
1696 except in jump.c doing warnings. */
1697 PUT_CODE (insn, NOTE);
1698 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1699 NOTE_SOURCE_FILE (insn) = 0;
1700 }
1701
1702 /* The insn to load VAR from a home in the arglist
1703 is now a no-op. When we see it, just delete it.
1704 Similarly if this is storing VAR from a register from which
1705 it was loaded in the previous insn. This will occur
1706 when an ADDRESSOF was made for an arglist slot. */
1707 else if (toplevel
1708 && (set = single_set (insn)) != 0
1709 && SET_DEST (set) == var
1710 /* If this represents the result of an insn group,
1711 don't delete the insn. */
1712 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1713 && (rtx_equal_p (SET_SRC (set), var)
1714 || (GET_CODE (SET_SRC (set)) == REG
1715 && (prev = prev_nonnote_insn (insn)) != 0
1716 && (prev_set = single_set (prev)) != 0
1717 && SET_DEST (prev_set) == SET_SRC (set)
1718 && rtx_equal_p (SET_SRC (prev_set), var))))
1719 {
1720 /* In unoptimized compilation, we shouldn't call delete_insn
1721 except in jump.c doing warnings. */
1722 PUT_CODE (insn, NOTE);
1723 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1724 NOTE_SOURCE_FILE (insn) = 0;
1725 if (insn == last_parm_insn)
1726 last_parm_insn = PREV_INSN (next);
1727 }
1728 else
1729 {
1730 struct fixup_replacement *replacements = 0;
1731 rtx next_insn = NEXT_INSN (insn);
1732
1733 if (SMALL_REGISTER_CLASSES)
1734 {
1735 /* If the insn that copies the results of a CALL_INSN
1736 into a pseudo now references VAR, we have to use an
1737 intermediate pseudo since we want the life of the
1738 return value register to be only a single insn.
1739
1740 If we don't use an intermediate pseudo, such things as
1741 address computations needed to make the address of VAR valid,
1742 if it is not already, could be placed between the CALL_INSN and INSN.
1743
1744 To make sure this doesn't happen, we record the destination
1745 of the CALL_INSN and see if the next insn uses both that
1746 and VAR. */
1747
1748 if (call_dest != 0 && GET_CODE (insn) == INSN
1749 && reg_mentioned_p (var, PATTERN (insn))
1750 && reg_mentioned_p (call_dest, PATTERN (insn)))
1751 {
1752 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1753
1754 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1755
1756 PATTERN (insn) = replace_rtx (PATTERN (insn),
1757 call_dest, temp);
1758 }
1759
1760 if (GET_CODE (insn) == CALL_INSN
1761 && GET_CODE (PATTERN (insn)) == SET)
1762 call_dest = SET_DEST (PATTERN (insn));
1763 else if (GET_CODE (insn) == CALL_INSN
1764 && GET_CODE (PATTERN (insn)) == PARALLEL
1765 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1766 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1767 else
1768 call_dest = 0;
1769 }
1770
1771 /* See if we have to do anything to INSN now that VAR is in
1772 memory. If it needs to be loaded into a pseudo, use a single
1773 pseudo for the entire insn in case there is a MATCH_DUP
1774 between two operands. We pass a pointer to the head of
1775 a list of struct fixup_replacements. If fixup_var_refs_1
1776 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1777 it will record them in this list.
1778
1779 If it allocated a pseudo for any replacement, we copy into
1780 it here. */
1781
1782 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1783 &replacements);
1784
1785 /* If this is last_parm_insn, and any instructions were output
1786 after it to fix it up, then we must set last_parm_insn to
1787 the last such instruction emitted. */
1788 if (insn == last_parm_insn)
1789 last_parm_insn = PREV_INSN (next_insn);
1790
1791 while (replacements)
1792 {
1793 struct fixup_replacement *next;
1794
1795 if (GET_CODE (replacements->new) == REG)
1796 {
1797 rtx insert_before;
1798 rtx seq;
1799
1800 /* OLD might be a (subreg (mem)). */
1801 if (GET_CODE (replacements->old) == SUBREG)
1802 replacements->old
1803 = fixup_memory_subreg (replacements->old, insn, 0);
1804 else
1805 replacements->old
1806 = fixup_stack_1 (replacements->old, insn);
1807
1808 insert_before = insn;
1809
1810 /* If we are changing the mode, do a conversion.
1811 This might be wasteful, but combine.c will
1812 eliminate much of the waste. */
1813
1814 if (GET_MODE (replacements->new)
1815 != GET_MODE (replacements->old))
1816 {
1817 start_sequence ();
1818 convert_move (replacements->new,
1819 replacements->old, unsignedp);
1820 seq = gen_sequence ();
1821 end_sequence ();
1822 }
1823 else
1824 seq = gen_move_insn (replacements->new,
1825 replacements->old);
1826
1827 emit_insn_before (seq, insert_before);
1828 }
1829
1830 next = replacements->next;
1831 free (replacements);
1832 replacements = next;
1833 }
1834 }
1835
1836 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1837 But don't touch other insns referred to by reg-notes;
1838 we will get them elsewhere. */
1839 while (note)
1840 {
1841 if (GET_CODE (note) != INSN_LIST)
1842 XEXP (note, 0)
1843 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1844 note = XEXP (note, 1);
1845 }
1846 }
1847
1848 if (!ht)
1849 insn = next;
1850 else if (insn_list)
1851 {
1852 insn = XEXP (insn_list, 0);
1853 insn_list = XEXP (insn_list, 1);
1854 }
1855 else
1856 insn = NULL_RTX;
1857 }
1858 }
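
/* For illustration, suppose VAR has just been given the stack home
   (mem:SI (plus:SI (reg fp) (const_int -8))).  An insn that is simply
   (clobber VAR) is turned into a NOTE_INSN_DELETED above, while a
   hypothetical insn such as (set (reg:SI 110) (plus:SI VAR (const_int 1)))
   is handed to fixup_var_refs_1, which may load VAR into a fresh pseudo
   emitted just before the insn if the MEM operand makes it unrecognizable.  */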
1859 \f
1860 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1861 See if the rtx expression at *LOC in INSN needs to be changed.
1862
1863 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1864 contain a list of original rtx's and replacements. If we find that we need
1865 to modify this insn by replacing a memory reference with a pseudo or by
1866 making a new MEM to implement a SUBREG, we consult that list to see if
1867 we have already chosen a replacement. If none has already been allocated,
1868 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1869 or the SUBREG, as appropriate, to the pseudo. */
1870
1871 static void
1872 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1873 register rtx var;
1874 enum machine_mode promoted_mode;
1875 register rtx *loc;
1876 rtx insn;
1877 struct fixup_replacement **replacements;
1878 {
1879 register int i;
1880 register rtx x = *loc;
1881 RTX_CODE code = GET_CODE (x);
1882 register const char *fmt;
1883 register rtx tem, tem1;
1884 struct fixup_replacement *replacement;
1885
1886 switch (code)
1887 {
1888 case ADDRESSOF:
1889 if (XEXP (x, 0) == var)
1890 {
1891 /* Prevent sharing of rtl that might lose. */
1892 rtx sub = copy_rtx (XEXP (var, 0));
1893
1894 if (! validate_change (insn, loc, sub, 0))
1895 {
1896 rtx y = gen_reg_rtx (GET_MODE (sub));
1897 rtx seq, new_insn;
1898
1899 /* We should be able to replace with a register or all is lost.
1900 Note that we can't use validate_change to verify this, since
1901 we're not caring for replacing all dups simultaneously. */
1902 if (! validate_replace_rtx (*loc, y, insn))
1903 abort ();
1904
1905 /* Careful! First try to recognize a direct move of the
1906 value, mimicking how things are done in gen_reload wrt
1907 PLUS. Consider what happens when insn is a conditional
1908 move instruction and addsi3 clobbers flags. */
1909
1910 start_sequence ();
1911 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1912 seq = gen_sequence ();
1913 end_sequence ();
1914
1915 if (recog_memoized (new_insn) < 0)
1916 {
1917 /* That failed. Fall back on force_operand and hope. */
1918
1919 start_sequence ();
1920 force_operand (sub, y);
1921 seq = gen_sequence ();
1922 end_sequence ();
1923 }
1924
1925 #ifdef HAVE_cc0
1926 /* Don't separate setter from user. */
1927 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1928 insn = PREV_INSN (insn);
1929 #endif
1930
1931 emit_insn_before (seq, insn);
1932 }
1933 }
1934 return;
1935
1936 case MEM:
1937 if (var == x)
1938 {
1939 /* If we already have a replacement, use it. Otherwise,
1940 try to fix up this address in case it is invalid. */
1941
1942 replacement = find_fixup_replacement (replacements, var);
1943 if (replacement->new)
1944 {
1945 *loc = replacement->new;
1946 return;
1947 }
1948
1949 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1950
1951 /* Unless we are forcing memory to register or we changed the mode,
1952 we can leave things the way they are if the insn is valid. */
1953
1954 INSN_CODE (insn) = -1;
1955 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1956 && recog_memoized (insn) >= 0)
1957 return;
1958
1959 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1960 return;
1961 }
1962
1963 /* If X contains VAR, we need to unshare it here so that we update
1964 each occurrence separately. But all identical MEMs in one insn
1965 must be replaced with the same rtx because of the possibility of
1966 MATCH_DUPs. */
1967
1968 if (reg_mentioned_p (var, x))
1969 {
1970 replacement = find_fixup_replacement (replacements, x);
1971 if (replacement->new == 0)
1972 replacement->new = copy_most_rtx (x, var);
1973
1974 *loc = x = replacement->new;
1975 code = GET_CODE (x);
1976 }
1977 break;
1978
1979 case REG:
1980 case CC0:
1981 case PC:
1982 case CONST_INT:
1983 case CONST:
1984 case SYMBOL_REF:
1985 case LABEL_REF:
1986 case CONST_DOUBLE:
1987 return;
1988
1989 case SIGN_EXTRACT:
1990 case ZERO_EXTRACT:
1991 /* Note that in some cases those types of expressions are altered
1992 by optimize_bit_field, and do not survive to get here. */
1993 if (XEXP (x, 0) == var
1994 || (GET_CODE (XEXP (x, 0)) == SUBREG
1995 && SUBREG_REG (XEXP (x, 0)) == var))
1996 {
1997 /* Get TEM as a valid MEM in the mode presently in the insn.
1998
1999 We don't worry about the possibility of MATCH_DUP here; it
2000 is highly unlikely and would be tricky to handle. */
2001
2002 tem = XEXP (x, 0);
2003 if (GET_CODE (tem) == SUBREG)
2004 {
2005 if (GET_MODE_BITSIZE (GET_MODE (tem))
2006 > GET_MODE_BITSIZE (GET_MODE (var)))
2007 {
2008 replacement = find_fixup_replacement (replacements, var);
2009 if (replacement->new == 0)
2010 replacement->new = gen_reg_rtx (GET_MODE (var));
2011 SUBREG_REG (tem) = replacement->new;
2012
2013 /* The following code works only if we have a MEM, so we
2014 need to handle the subreg here. We directly substitute
2015 it assuming that a subreg must be OK here. We already
2016 scheduled a replacement to copy the mem into the
2017 subreg. */
2018 XEXP (x, 0) = tem;
2019 return;
2020 }
2021 else
2022 tem = fixup_memory_subreg (tem, insn, 0);
2023 }
2024 else
2025 tem = fixup_stack_1 (tem, insn);
2026
2027 /* Unless we want to load from memory, get TEM into the proper mode
2028 for an extract from memory. This can only be done if the
2029 extract is at a constant position and length. */
2030
2031 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
2032 && GET_CODE (XEXP (x, 2)) == CONST_INT
2033 && ! mode_dependent_address_p (XEXP (tem, 0))
2034 && ! MEM_VOLATILE_P (tem))
2035 {
2036 enum machine_mode wanted_mode = VOIDmode;
2037 enum machine_mode is_mode = GET_MODE (tem);
2038 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2039
2040 #ifdef HAVE_extzv
2041 if (GET_CODE (x) == ZERO_EXTRACT)
2042 {
2043 wanted_mode
2044 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
2045 if (wanted_mode == VOIDmode)
2046 wanted_mode = word_mode;
2047 }
2048 #endif
2049 #ifdef HAVE_extv
2050 if (GET_CODE (x) == SIGN_EXTRACT)
2051 {
2052 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
2053 if (wanted_mode == VOIDmode)
2054 wanted_mode = word_mode;
2055 }
2056 #endif
2057 /* If we have a narrower mode, we can do something. */
2058 if (wanted_mode != VOIDmode
2059 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2060 {
2061 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2062 rtx old_pos = XEXP (x, 2);
2063 rtx newmem;
2064
2065 /* If the bytes and bits are counted differently, we
2066 must adjust the offset. */
2067 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2068 offset = (GET_MODE_SIZE (is_mode)
2069 - GET_MODE_SIZE (wanted_mode) - offset);
2070
2071 pos %= GET_MODE_BITSIZE (wanted_mode);
2072
2073 newmem = gen_rtx_MEM (wanted_mode,
2074 plus_constant (XEXP (tem, 0), offset));
2075 MEM_COPY_ATTRIBUTES (newmem, tem);
2076
2077 /* Make the change and see if the insn remains valid. */
2078 INSN_CODE (insn) = -1;
2079 XEXP (x, 0) = newmem;
2080 XEXP (x, 2) = GEN_INT (pos);
2081
2082 if (recog_memoized (insn) >= 0)
2083 return;
2084
2085 /* Otherwise, restore old position. XEXP (x, 0) will be
2086 restored later. */
2087 XEXP (x, 2) = old_pos;
2088 }
2089 }
2090
2091 /* If we get here, the bitfield extract insn can't accept a memory
2092 reference. Copy the input into a register. */
2093
2094 tem1 = gen_reg_rtx (GET_MODE (tem));
2095 emit_insn_before (gen_move_insn (tem1, tem), insn);
2096 XEXP (x, 0) = tem1;
2097 return;
2098 }
2099 break;
2100
2101 case SUBREG:
2102 if (SUBREG_REG (x) == var)
2103 {
2104 /* If this is a special SUBREG made because VAR was promoted
2105 from a wider mode, replace it with VAR and call ourself
2106 recursively, this time saying that the object previously
2107 had its current mode (by virtue of the SUBREG). */
2108
2109 if (SUBREG_PROMOTED_VAR_P (x))
2110 {
2111 *loc = var;
2112 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2113 return;
2114 }
2115
2116 /* If this SUBREG makes VAR wider, it has become a paradoxical
2117 SUBREG with VAR in memory, but these aren't allowed at this
2118 stage of the compilation. So load VAR into a pseudo and take
2119 a SUBREG of that pseudo. */
2120 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2121 {
2122 replacement = find_fixup_replacement (replacements, var);
2123 if (replacement->new == 0)
2124 replacement->new = gen_reg_rtx (GET_MODE (var));
2125 SUBREG_REG (x) = replacement->new;
2126 return;
2127 }
2128
2129 /* See if we have already found a replacement for this SUBREG.
2130 If so, use it. Otherwise, make a MEM and see if the insn
2131 is recognized. If not, or if we should force MEM into a register,
2132 make a pseudo for this SUBREG. */
2133 replacement = find_fixup_replacement (replacements, x);
2134 if (replacement->new)
2135 {
2136 *loc = replacement->new;
2137 return;
2138 }
2139
2140 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2141
2142 INSN_CODE (insn) = -1;
2143 if (! flag_force_mem && recog_memoized (insn) >= 0)
2144 return;
2145
2146 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2147 return;
2148 }
2149 break;
2150
2151 case SET:
2152 /* First do special simplification of bit-field references. */
2153 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2154 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2155 optimize_bit_field (x, insn, 0);
2156 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2157 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2158 optimize_bit_field (x, insn, NULL_PTR);
2159
2160 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2161 into a register and then store it back out. */
2162 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2163 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2164 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2165 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2166 > GET_MODE_SIZE (GET_MODE (var))))
2167 {
2168 replacement = find_fixup_replacement (replacements, var);
2169 if (replacement->new == 0)
2170 replacement->new = gen_reg_rtx (GET_MODE (var));
2171
2172 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2173 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2174 }
2175
2176 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2177 insn into a pseudo and store the low part of the pseudo into VAR. */
2178 if (GET_CODE (SET_DEST (x)) == SUBREG
2179 && SUBREG_REG (SET_DEST (x)) == var
2180 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2181 > GET_MODE_SIZE (GET_MODE (var))))
2182 {
2183 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2184 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2185 tem)),
2186 insn);
2187 break;
2188 }
2189
2190 {
2191 rtx dest = SET_DEST (x);
2192 rtx src = SET_SRC (x);
2193 #ifdef HAVE_insv
2194 rtx outerdest = dest;
2195 #endif
2196
2197 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2198 || GET_CODE (dest) == SIGN_EXTRACT
2199 || GET_CODE (dest) == ZERO_EXTRACT)
2200 dest = XEXP (dest, 0);
2201
2202 if (GET_CODE (src) == SUBREG)
2203 src = XEXP (src, 0);
2204
2205 /* If VAR does not appear at the top level of the SET
2206 just scan the lower levels of the tree. */
2207
2208 if (src != var && dest != var)
2209 break;
2210
2211 /* We will need to rerecognize this insn. */
2212 INSN_CODE (insn) = -1;
2213
2214 #ifdef HAVE_insv
2215 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2216 {
2217 /* Since this case will return, ensure we fixup all the
2218 operands here. */
2219 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2220 insn, replacements);
2221 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2222 insn, replacements);
2223 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2224 insn, replacements);
2225
2226 tem = XEXP (outerdest, 0);
2227
2228 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2229 that may appear inside a ZERO_EXTRACT.
2230 This was legitimate when the MEM was a REG. */
2231 if (GET_CODE (tem) == SUBREG
2232 && SUBREG_REG (tem) == var)
2233 tem = fixup_memory_subreg (tem, insn, 0);
2234 else
2235 tem = fixup_stack_1 (tem, insn);
2236
2237 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2238 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2239 && ! mode_dependent_address_p (XEXP (tem, 0))
2240 && ! MEM_VOLATILE_P (tem))
2241 {
2242 enum machine_mode wanted_mode;
2243 enum machine_mode is_mode = GET_MODE (tem);
2244 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2245
2246 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2247 if (wanted_mode == VOIDmode)
2248 wanted_mode = word_mode;
2249
2250 /* If we have a narrower mode, we can do something. */
2251 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2252 {
2253 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2254 rtx old_pos = XEXP (outerdest, 2);
2255 rtx newmem;
2256
2257 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2258 offset = (GET_MODE_SIZE (is_mode)
2259 - GET_MODE_SIZE (wanted_mode) - offset);
2260
2261 pos %= GET_MODE_BITSIZE (wanted_mode);
2262
2263 newmem = gen_rtx_MEM (wanted_mode,
2264 plus_constant (XEXP (tem, 0),
2265 offset));
2266 MEM_COPY_ATTRIBUTES (newmem, tem);
2267
2268 /* Make the change and see if the insn remains valid. */
2269 INSN_CODE (insn) = -1;
2270 XEXP (outerdest, 0) = newmem;
2271 XEXP (outerdest, 2) = GEN_INT (pos);
2272
2273 if (recog_memoized (insn) >= 0)
2274 return;
2275
2276 /* Otherwise, restore old position. XEXP (x, 0) will be
2277 restored later. */
2278 XEXP (outerdest, 2) = old_pos;
2279 }
2280 }
2281
2282 /* If we get here, the bit-field store doesn't allow memory
2283 or isn't located at a constant position. Load the value into
2284 a register, do the store, and put it back into memory. */
2285
2286 tem1 = gen_reg_rtx (GET_MODE (tem));
2287 emit_insn_before (gen_move_insn (tem1, tem), insn);
2288 emit_insn_after (gen_move_insn (tem, tem1), insn);
2289 XEXP (outerdest, 0) = tem1;
2290 return;
2291 }
2292 #endif
2293
2294 /* STRICT_LOW_PART is a no-op on memory references
2295 and it can cause combinations to be unrecognizable,
2296 so eliminate it. */
2297
2298 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2299 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2300
2301 /* A valid insn to copy VAR into or out of a register
2302 must be left alone, to avoid an infinite loop here.
2303 If the reference to VAR is by a subreg, fix that up,
2304 since SUBREG is not valid for a memref.
2305 Also fix up the address of the stack slot.
2306
2307 Note that we must not try to recognize the insn until
2308 after we know that we have valid addresses and no
2309 (subreg (mem ...) ...) constructs, since these interfere
2310 with determining the validity of the insn. */
2311
2312 if ((SET_SRC (x) == var
2313 || (GET_CODE (SET_SRC (x)) == SUBREG
2314 && SUBREG_REG (SET_SRC (x)) == var))
2315 && (GET_CODE (SET_DEST (x)) == REG
2316 || (GET_CODE (SET_DEST (x)) == SUBREG
2317 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2318 && GET_MODE (var) == promoted_mode
2319 && x == single_set (insn))
2320 {
2321 rtx pat, last;
2322
2323 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2324 if (replacement->new)
2325 SET_SRC (x) = replacement->new;
2326 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2327 SET_SRC (x) = replacement->new
2328 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2329 else
2330 SET_SRC (x) = replacement->new
2331 = fixup_stack_1 (SET_SRC (x), insn);
2332
2333 if (recog_memoized (insn) >= 0)
2334 return;
2335
2336 /* INSN is not valid, but we know that we want to
2337 copy SET_SRC (x) to SET_DEST (x) in some way. So
2338 we generate the move and see whether it requires more
2339 than one insn. If it does, we emit those insns and
2340 delete INSN. Otherwise, we can just replace the pattern
2341 of INSN; we have already verified above that INSN has
2342 no other function than to do X. */
2343
2344 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2345 if (GET_CODE (pat) == SEQUENCE)
2346 {
2347 last = emit_insn_before (pat, insn);
2348
2349 /* INSN might have REG_RETVAL or other important notes, so
2350 we need to store the pattern of the last insn in the
2351 sequence into INSN similarly to the normal case. LAST
2352 should not have REG_NOTES, but we allow them if INSN has
2353 no REG_NOTES. */
2354 if (REG_NOTES (last) && REG_NOTES (insn))
2355 abort ();
2356 if (REG_NOTES (last))
2357 REG_NOTES (insn) = REG_NOTES (last);
2358 PATTERN (insn) = PATTERN (last);
2359
2360 PUT_CODE (last, NOTE);
2361 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2362 NOTE_SOURCE_FILE (last) = 0;
2363 }
2364 else
2365 PATTERN (insn) = pat;
2366
2367 return;
2368 }
2369
2370 if ((SET_DEST (x) == var
2371 || (GET_CODE (SET_DEST (x)) == SUBREG
2372 && SUBREG_REG (SET_DEST (x)) == var))
2373 && (GET_CODE (SET_SRC (x)) == REG
2374 || (GET_CODE (SET_SRC (x)) == SUBREG
2375 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2376 && GET_MODE (var) == promoted_mode
2377 && x == single_set (insn))
2378 {
2379 rtx pat, last;
2380
2381 if (GET_CODE (SET_DEST (x)) == SUBREG)
2382 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2383 else
2384 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2385
2386 if (recog_memoized (insn) >= 0)
2387 return;
2388
2389 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2390 if (GET_CODE (pat) == SEQUENCE)
2391 {
2392 last = emit_insn_before (pat, insn);
2393
2394 /* INSN might have REG_RETVAL or other important notes, so
2395 we need to store the pattern of the last insn in the
2396 sequence into INSN similarly to the normal case. LAST
2397 should not have REG_NOTES, but we allow them if INSN has
2398 no REG_NOTES. */
2399 if (REG_NOTES (last) && REG_NOTES (insn))
2400 abort ();
2401 if (REG_NOTES (last))
2402 REG_NOTES (insn) = REG_NOTES (last);
2403 PATTERN (insn) = PATTERN (last);
2404
2405 PUT_CODE (last, NOTE);
2406 NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
2407 NOTE_SOURCE_FILE (last) = 0;
2408 }
2409 else
2410 PATTERN (insn) = pat;
2411
2412 return;
2413 }
2414
2415 /* Otherwise, storing into VAR must be handled specially
2416 by storing into a temporary and copying that into VAR
2417 with a new insn after this one. Note that this case
2418 will be used when storing into a promoted scalar since
2419 the insn will now have different modes on the input
2420 and output and hence will be invalid (except for the case
2421 of setting it to a constant, which does not need any
2422 change if it is valid). We generate extra code in that case,
2423 but combine.c will eliminate it. */
2424
2425 if (dest == var)
2426 {
2427 rtx temp;
2428 rtx fixeddest = SET_DEST (x);
2429
2430 /* STRICT_LOW_PART can be discarded around a MEM. */
2431 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2432 fixeddest = XEXP (fixeddest, 0);
2433 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2434 if (GET_CODE (fixeddest) == SUBREG)
2435 {
2436 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2437 promoted_mode = GET_MODE (fixeddest);
2438 }
2439 else
2440 fixeddest = fixup_stack_1 (fixeddest, insn);
2441
2442 temp = gen_reg_rtx (promoted_mode);
2443
2444 emit_insn_after (gen_move_insn (fixeddest,
2445 gen_lowpart (GET_MODE (fixeddest),
2446 temp)),
2447 insn);
2448
2449 SET_DEST (x) = temp;
2450 }
2451 }
2452
2453 default:
2454 break;
2455 }
2456
2457 /* Nothing special about this RTX; fix its operands. */
2458
2459 fmt = GET_RTX_FORMAT (code);
2460 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2461 {
2462 if (fmt[i] == 'e')
2463 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2464 else if (fmt[i] == 'E')
2465 {
2466 register int j;
2467 for (j = 0; j < XVECLEN (x, i); j++)
2468 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2469 insn, replacements);
2470 }
2471 }
2472 }
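
/* For illustration, consider a hypothetical paradoxical reference
   (subreg:DI (mem:SI VAR-slot) 0) met after VAR was forced into memory on
   a 32-bit target.  Paradoxical SUBREGs of a MEM are not allowed at this
   stage, so the SUBREG case above allocates a fresh SImode pseudo as the
   replacement for VAR; fixup_var_refs_insns then copies the MEM into that
   pseudo, and the SUBREG ends up applied to the pseudo instead.  */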
2473 \f
2474 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2475 return an rtx (MEM:m1 newaddr) which is equivalent.
2476 If any insns must be emitted to compute NEWADDR, put them before INSN.
2477
2478 UNCRITICAL nonzero means accept paradoxical subregs.
2479 This is used for subregs found inside REG_NOTES. */
2480
2481 static rtx
2482 fixup_memory_subreg (x, insn, uncritical)
2483 rtx x;
2484 rtx insn;
2485 int uncritical;
2486 {
2487 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2488 rtx addr = XEXP (SUBREG_REG (x), 0);
2489 enum machine_mode mode = GET_MODE (x);
2490 rtx result;
2491
2492 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2493 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2494 && ! uncritical)
2495 abort ();
2496
2497 if (BYTES_BIG_ENDIAN)
2498 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2499 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2500 addr = plus_constant (addr, offset);
2501 if (!flag_force_addr && memory_address_p (mode, addr))
2502 /* Shortcut if no insns need be emitted. */
2503 return change_address (SUBREG_REG (x), mode, addr);
2504 start_sequence ();
2505 result = change_address (SUBREG_REG (x), mode, addr);
2506 emit_insn_before (gen_sequence (), insn);
2507 end_sequence ();
2508 return result;
2509 }
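
/* For illustration, with 4-byte words a reference such as
   (subreg:SI (mem:DI (reg A)) 1) is rewritten by fixup_memory_subreg into
   (mem:SI (plus (reg A) (const_int 4))), i.e. a direct reference to word 1
   of the original DImode memory; any insns needed to form the new address
   are emitted before INSN.  (Register A is a made-up stand-in here.)  */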
2510
2511 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2512 Replace subexpressions of X in place.
2513 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2514 Otherwise return X, with its contents possibly altered.
2515
2516 If any insns must be emitted to compute NEWADDR, put them before INSN.
2517
2518 UNCRITICAL is as in fixup_memory_subreg. */
2519
2520 static rtx
2521 walk_fixup_memory_subreg (x, insn, uncritical)
2522 register rtx x;
2523 rtx insn;
2524 int uncritical;
2525 {
2526 register enum rtx_code code;
2527 register const char *fmt;
2528 register int i;
2529
2530 if (x == 0)
2531 return 0;
2532
2533 code = GET_CODE (x);
2534
2535 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2536 return fixup_memory_subreg (x, insn, uncritical);
2537
2538 /* Nothing special about this RTX; fix its operands. */
2539
2540 fmt = GET_RTX_FORMAT (code);
2541 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2542 {
2543 if (fmt[i] == 'e')
2544 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2545 else if (fmt[i] == 'E')
2546 {
2547 register int j;
2548 for (j = 0; j < XVECLEN (x, i); j++)
2549 XVECEXP (x, i, j)
2550 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2551 }
2552 }
2553 return x;
2554 }
2555 \f
2556 /* For each memory ref within X, if it refers to a stack slot
2557 with an out of range displacement, put the address in a temp register
2558 (emitting new insns before INSN to load these registers)
2559 and alter the memory ref to use that register.
2560 Replace each such MEM rtx with a copy, to avoid clobberage. */
2561
2562 static rtx
2563 fixup_stack_1 (x, insn)
2564 rtx x;
2565 rtx insn;
2566 {
2567 register int i;
2568 register RTX_CODE code = GET_CODE (x);
2569 register const char *fmt;
2570
2571 if (code == MEM)
2572 {
2573 register rtx ad = XEXP (x, 0);
2574 /* If we have address of a stack slot but it's not valid
2575 (displacement is too large), compute the sum in a register. */
2576 if (GET_CODE (ad) == PLUS
2577 && GET_CODE (XEXP (ad, 0)) == REG
2578 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2579 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2580 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2581 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2582 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2583 #endif
2584 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2585 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2586 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2587 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2588 {
2589 rtx temp, seq;
2590 if (memory_address_p (GET_MODE (x), ad))
2591 return x;
2592
2593 start_sequence ();
2594 temp = copy_to_reg (ad);
2595 seq = gen_sequence ();
2596 end_sequence ();
2597 emit_insn_before (seq, insn);
2598 return change_address (x, VOIDmode, temp);
2599 }
2600 return x;
2601 }
2602
2603 fmt = GET_RTX_FORMAT (code);
2604 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2605 {
2606 if (fmt[i] == 'e')
2607 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2608 else if (fmt[i] == 'E')
2609 {
2610 register int j;
2611 for (j = 0; j < XVECLEN (x, i); j++)
2612 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2613 }
2614 }
2615 return x;
2616 }
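
/* For illustration, on a hypothetical target whose displacements are
   limited to 12 bits, (mem:SI (plus:SI (reg fp) (const_int 5000))) fails
   memory_address_p; fixup_stack_1 then emits insns before INSN that copy
   the PLUS into a temporary pseudo with copy_to_reg and rewrites the
   reference as (mem:SI (reg TEMP)).  */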
2617 \f
2618 /* Optimization: a bit-field instruction whose field
2619 happens to be a byte or halfword in memory
2620 can be changed to a move instruction.
2621
2622 We call here when INSN is an insn to examine or store into a bit-field.
2623 BODY is the SET-rtx to be altered.
2624
2625 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2626 (Currently this is called only from function.c, and EQUIV_MEM
2627 is always 0.) */
2628
2629 static void
2630 optimize_bit_field (body, insn, equiv_mem)
2631 rtx body;
2632 rtx insn;
2633 rtx *equiv_mem;
2634 {
2635 register rtx bitfield;
2636 int destflag;
2637 rtx seq = 0;
2638 enum machine_mode mode;
2639
2640 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2641 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2642 bitfield = SET_DEST (body), destflag = 1;
2643 else
2644 bitfield = SET_SRC (body), destflag = 0;
2645
2646 /* First check that the field being stored has constant size and position
2647 and is in fact a byte or halfword suitably aligned. */
2648
2649 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2650 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2651 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2652 != BLKmode)
2653 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2654 {
2655 register rtx memref = 0;
2656
2657 /* Now check that the containing word is memory, not a register,
2658 and that it is safe to change the machine mode. */
2659
2660 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2661 memref = XEXP (bitfield, 0);
2662 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2663 && equiv_mem != 0)
2664 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2665 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2666 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2667 memref = SUBREG_REG (XEXP (bitfield, 0));
2668 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2669 && equiv_mem != 0
2670 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2671 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2672
2673 if (memref
2674 && ! mode_dependent_address_p (XEXP (memref, 0))
2675 && ! MEM_VOLATILE_P (memref))
2676 {
2677 /* Now adjust the address, first for any subreg'ing
2678 that we are now getting rid of,
2679 and then for which byte of the word is wanted. */
2680
2681 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2682 rtx insns;
2683
2684 /* Adjust OFFSET to count bits from low-address byte. */
2685 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2686 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2687 - offset - INTVAL (XEXP (bitfield, 1)));
2688
2689 /* Adjust OFFSET to count bytes from low-address byte. */
2690 offset /= BITS_PER_UNIT;
2691 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2692 {
2693 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2694 if (BYTES_BIG_ENDIAN)
2695 offset -= (MIN (UNITS_PER_WORD,
2696 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2697 - MIN (UNITS_PER_WORD,
2698 GET_MODE_SIZE (GET_MODE (memref))));
2699 }
2700
2701 start_sequence ();
2702 memref = change_address (memref, mode,
2703 plus_constant (XEXP (memref, 0), offset));
2704 insns = get_insns ();
2705 end_sequence ();
2706 emit_insns_before (insns, insn);
2707
2708 /* Store this memory reference where
2709 we found the bit field reference. */
2710
2711 if (destflag)
2712 {
2713 validate_change (insn, &SET_DEST (body), memref, 1);
2714 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2715 {
2716 rtx src = SET_SRC (body);
2717 while (GET_CODE (src) == SUBREG
2718 && SUBREG_WORD (src) == 0)
2719 src = SUBREG_REG (src);
2720 if (GET_MODE (src) != GET_MODE (memref))
2721 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2722 validate_change (insn, &SET_SRC (body), src, 1);
2723 }
2724 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2725 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2726 /* This shouldn't happen because anything that didn't have
2727 one of these modes should have got converted explicitly
2728 and then referenced through a subreg.
2729 This is so because the original bit-field was
2730 handled by agg_mode and so its tree structure had
2731 the same mode that memref now has. */
2732 abort ();
2733 }
2734 else
2735 {
2736 rtx dest = SET_DEST (body);
2737
2738 while (GET_CODE (dest) == SUBREG
2739 && SUBREG_WORD (dest) == 0
2740 && (GET_MODE_CLASS (GET_MODE (dest))
2741 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2742 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2743 <= UNITS_PER_WORD))
2744 dest = SUBREG_REG (dest);
2745
2746 validate_change (insn, &SET_DEST (body), dest, 1);
2747
2748 if (GET_MODE (dest) == GET_MODE (memref))
2749 validate_change (insn, &SET_SRC (body), memref, 1);
2750 else
2751 {
2752 /* Convert the mem ref to the destination mode. */
2753 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2754
2755 start_sequence ();
2756 convert_move (newreg, memref,
2757 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2758 seq = get_insns ();
2759 end_sequence ();
2760
2761 validate_change (insn, &SET_SRC (body), newreg, 1);
2762 }
2763 }
2764
2765 /* See if we can convert this extraction or insertion into
2766 a simple move insn. We might not be able to do so if this
2767 was, for example, part of a PARALLEL.
2768
2769 If we succeed, write out any needed conversions. If we fail,
2770 it is hard to guess why we failed, so don't do anything
2771 special; just let the optimization be suppressed. */
2772
2773 if (apply_change_group () && seq)
2774 emit_insns_before (seq, insn);
2775 }
2776 }
2777 }
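
/* For illustration, assuming a target where BYTES_BIG_ENDIAN and
   BITS_BIG_ENDIAN agree, a hypothetical field reference

	(set (reg:SI 120)
	     (zero_extract:SI (mem:SI (reg A)) (const_int 8) (const_int 8)))

   names an aligned byte, so optimize_bit_field can rewrite it to read
   (mem:QI (plus (reg A) (const_int 1))) and zero-extend that QImode value
   with convert_move into a fresh SImode pseudo that replaces the extract.  */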
2778 \f
2779 /* These routines are responsible for converting virtual register references
2780 to the actual hard register references once RTL generation is complete.
2781
2782 The following four variables are used for communication between the
2783 routines. They contain the offsets of the virtual registers from their
2784 respective hard registers. */
2785
2786 static int in_arg_offset;
2787 static int var_offset;
2788 static int dynamic_offset;
2789 static int out_arg_offset;
2790 static int cfa_offset;
2791
2792 /* In most machines, the stack pointer register is equivalent to the bottom
2793 of the stack. */
2794
2795 #ifndef STACK_POINTER_OFFSET
2796 #define STACK_POINTER_OFFSET 0
2797 #endif
2798
2799 /* If not defined, pick an appropriate default for the offset of dynamically
2800 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2801 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2802
2803 #ifndef STACK_DYNAMIC_OFFSET
2804
2805 /* The bottom of the stack points to the actual arguments. If
2806 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2807 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2808 stack space for register parameters is not pushed by the caller, but
2809 rather part of the fixed stack areas and hence not included in
2810 `current_function_outgoing_args_size'. Nevertheless, we must allow
2811 for it when allocating dynamic stack objects. */
2812
2813 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2814 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2815 ((ACCUMULATE_OUTGOING_ARGS \
2816 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2817 + (STACK_POINTER_OFFSET)) \
2818
2819 #else
2820 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2821 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
2822 + (STACK_POINTER_OFFSET))
2823 #endif
2824 #endif
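
/* As a worked example with made-up numbers: if ACCUMULATE_OUTGOING_ARGS is
   nonzero, current_function_outgoing_args_size is 32, REG_PARM_STACK_SPACE
   does not apply, and STACK_POINTER_OFFSET is 0, the default definition
   above yields a dynamic offset of 32, so dynamically allocated objects
   are placed beyond the outgoing-argument area.  */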
2825
2826 /* On most machines, the CFA coincides with the first incoming parm. */
2827
2828 #ifndef ARG_POINTER_CFA_OFFSET
2829 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2830 #endif
2831
2832 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2833 its address taken. DECL is the decl for the object stored in the
2834 register, for later use if we do need to force REG into the stack.
2835 REG is overwritten by the MEM like in put_reg_into_stack. */
2836
2837 rtx
2838 gen_mem_addressof (reg, decl)
2839 rtx reg;
2840 tree decl;
2841 {
2842 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2843 REGNO (reg), decl);
2844
2845 /* If the original REG was a user-variable, then so is the REG whose
2846 address is being taken. Likewise for unchanging. */
2847 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2848 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2849
2850 PUT_CODE (reg, MEM);
2851 XEXP (reg, 0) = r;
2852 if (decl)
2853 {
2854 tree type = TREE_TYPE (decl);
2855
2856 PUT_MODE (reg, DECL_MODE (decl));
2857 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2858 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2859 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2860
2861 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2862 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2863 }
2864 else
2865 {
2866 /* We have no alias information about this newly created MEM. */
2867 MEM_ALIAS_SET (reg) = 0;
2868
2869 fixup_var_refs (reg, GET_MODE (reg), 0, 0);
2870 }
2871
2872 return reg;
2873 }
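
/* For illustration, if a hypothetical (reg:SI 117) holds the variable `i'
   and its address is taken, gen_mem_addressof rewrites that rtx in place
   into

	(mem:SI (addressof:Pmode (reg:SI 118) 117 <decl for i>))

   where 118 is a freshly allocated pseudo and 117 was the original register
   number; fixup_var_refs then adjusts insns that relied on the REG form.  */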
2874
2875 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2876
2877 void
2878 flush_addressof (decl)
2879 tree decl;
2880 {
2881 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2882 && DECL_RTL (decl) != 0
2883 && GET_CODE (DECL_RTL (decl)) == MEM
2884 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2885 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2886 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2887 }
2888
2889 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2890
2891 static void
2892 put_addressof_into_stack (r, ht)
2893 rtx r;
2894 struct hash_table *ht;
2895 {
2896 tree decl, type;
2897 int volatile_p, used_p;
2898
2899 rtx reg = XEXP (r, 0);
2900
2901 if (GET_CODE (reg) != REG)
2902 abort ();
2903
2904 decl = ADDRESSOF_DECL (r);
2905 if (decl)
2906 {
2907 type = TREE_TYPE (decl);
2908 volatile_p = (TREE_CODE (decl) != SAVE_EXPR
2909 && TREE_THIS_VOLATILE (decl));
2910 used_p = (TREE_USED (decl)
2911 || (TREE_CODE (decl) != SAVE_EXPR
2912 && DECL_INITIAL (decl) != 0));
2913 }
2914 else
2915 {
2916 type = NULL_TREE;
2917 volatile_p = 0;
2918 used_p = 1;
2919 }
2920
2921 put_reg_into_stack (0, reg, type, GET_MODE (reg), GET_MODE (reg),
2922 volatile_p, ADDRESSOF_REGNO (r), used_p, ht);
2923 }
2924
2925 /* List of replacements made below in purge_addressof_1 when creating
2926 bitfield insertions. */
2927 static rtx purge_bitfield_addressof_replacements;
2928
2929 /* List of replacements made below in purge_addressof_1 for patterns
2930 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2931 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2932 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2933 enough in complex cases, e.g. when some field values can be
2934 extracted by using a MEM with a narrower mode. */
2935 static rtx purge_addressof_replacements;
2936
2937 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2938 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2939 the stack. If the function returns FALSE then the replacement could not
2940 be made. */
2941
2942 static boolean
2943 purge_addressof_1 (loc, insn, force, store, ht)
2944 rtx *loc;
2945 rtx insn;
2946 int force, store;
2947 struct hash_table *ht;
2948 {
2949 rtx x;
2950 RTX_CODE code;
2951 int i, j;
2952 const char *fmt;
2953 boolean result = true;
2954
2955 /* Re-start here to avoid recursion in common cases. */
2956 restart:
2957
2958 x = *loc;
2959 if (x == 0)
2960 return true;
2961
2962 code = GET_CODE (x);
2963
2964 /* If we don't return in any of the cases below, we will recurse inside
2965 the RTX, which will normally result in any ADDRESSOF being forced into
2966 memory. */
2967 if (code == SET)
2968 {
2969 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2970 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2971 return result;
2972 }
2973
2974 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2975 {
2976 /* We must create a copy of the rtx because it was created by
2977 overwriting a REG rtx which is always shared. */
2978 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2979 rtx insns;
2980
2981 if (validate_change (insn, loc, sub, 0)
2982 || validate_replace_rtx (x, sub, insn))
2983 return true;
2984
2985 start_sequence ();
2986 sub = force_operand (sub, NULL_RTX);
2987 if (! validate_change (insn, loc, sub, 0)
2988 && ! validate_replace_rtx (x, sub, insn))
2989 abort ();
2990
2991 insns = gen_sequence ();
2992 end_sequence ();
2993 emit_insn_before (insns, insn);
2994 return true;
2995 }
2996
2997 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2998 {
2999 rtx sub = XEXP (XEXP (x, 0), 0);
3000 rtx sub2;
3001
3002 if (GET_CODE (sub) == MEM)
3003 {
3004 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
3005 MEM_COPY_ATTRIBUTES (sub2, sub);
3006 sub = sub2;
3007 }
3008 else if (GET_CODE (sub) == REG
3009 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3010 ;
3011 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3012 {
3013 int size_x, size_sub;
3014
3015 if (!insn)
3016 {
3017 /* When processing REG_NOTES look at the list of
3018 replacements done on the insn to find the register that X
3019 was replaced by. */
3020 rtx tem;
3021
3022 for (tem = purge_bitfield_addressof_replacements;
3023 tem != NULL_RTX;
3024 tem = XEXP (XEXP (tem, 1), 1))
3025 if (rtx_equal_p (x, XEXP (tem, 0)))
3026 {
3027 *loc = XEXP (XEXP (tem, 1), 0);
3028 return true;
3029 }
3030
3031 /* See comment for purge_addressof_replacements. */
3032 for (tem = purge_addressof_replacements;
3033 tem != NULL_RTX;
3034 tem = XEXP (XEXP (tem, 1), 1))
3035 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3036 {
3037 rtx z = XEXP (XEXP (tem, 1), 0);
3038
3039 if (GET_MODE (x) == GET_MODE (z)
3040 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3041 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3042 abort ();
3043
3044 /* It can happen that the note may speak of things
3045 in a wider (or just different) mode than the
3046 code did. This is especially true of
3047 REG_RETVAL. */
3048
3049 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
3050 z = SUBREG_REG (z);
3051
3052 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3053 && (GET_MODE_SIZE (GET_MODE (x))
3054 > GET_MODE_SIZE (GET_MODE (z))))
3055 {
3056 /* This can occur as a result of invalid
3057 pointer casts, e.g. float f; ...
3058 *(long long int *)&f.
3059 ??? We could emit a warning here, but
3060 without a line number that wouldn't be
3061 very helpful. */
3062 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3063 }
3064 else
3065 z = gen_lowpart (GET_MODE (x), z);
3066
3067 *loc = z;
3068 return true;
3069 }
3070
3071 /* Sometimes we may not be able to find the replacement. For
3072 example when the original insn was a MEM in a wider mode,
3073 and the note is part of a sign extension of a narrowed
3074 version of that MEM. Gcc testcase compile/990829-1.c can
3075 generate an example of this situation. Rather than complain,
3076 we return false, which will prompt our caller to remove the
3077 offending note. */
3078 return false;
3079 }
3080
3081 size_x = GET_MODE_BITSIZE (GET_MODE (x));
3082 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3083
3084 /* Don't even consider working with paradoxical subregs,
3085 or the moral equivalent seen here. */
3086 if (size_x <= size_sub
3087 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3088 {
3089 /* Do a bitfield insertion to mirror what would happen
3090 in memory. */
3091
3092 rtx val, seq;
3093
3094 if (store)
3095 {
3096 rtx p = PREV_INSN (insn);
3097
3098 start_sequence ();
3099 val = gen_reg_rtx (GET_MODE (x));
3100 if (! validate_change (insn, loc, val, 0))
3101 {
3102 /* Discard the current sequence and put the
3103 ADDRESSOF on stack. */
3104 end_sequence ();
3105 goto give_up;
3106 }
3107 seq = gen_sequence ();
3108 end_sequence ();
3109 emit_insn_before (seq, insn);
3110 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3111 insn, ht);
3112
3113 start_sequence ();
3114 store_bit_field (sub, size_x, 0, GET_MODE (x),
3115 val, GET_MODE_SIZE (GET_MODE (sub)),
3116 GET_MODE_ALIGNMENT (GET_MODE (sub)));
3117
3118 /* Make sure to unshare any shared rtl that store_bit_field
3119 might have created. */
3120 unshare_all_rtl_again (get_insns ());
3121
3122 seq = gen_sequence ();
3123 end_sequence ();
3124 p = emit_insn_after (seq, insn);
3125 if (NEXT_INSN (insn))
3126 compute_insns_for_mem (NEXT_INSN (insn),
3127 p ? NEXT_INSN (p) : NULL_RTX,
3128 ht);
3129 }
3130 else
3131 {
3132 rtx p = PREV_INSN (insn);
3133
3134 start_sequence ();
3135 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3136 GET_MODE (x), GET_MODE (x),
3137 GET_MODE_SIZE (GET_MODE (sub)),
3138 GET_MODE_SIZE (GET_MODE (sub)));
3139
3140 if (! validate_change (insn, loc, val, 0))
3141 {
3142 /* Discard the current sequence and put the
3143 ADDRESSOF on stack. */
3144 end_sequence ();
3145 goto give_up;
3146 }
3147
3148 seq = gen_sequence ();
3149 end_sequence ();
3150 emit_insn_before (seq, insn);
3151 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3152 insn, ht);
3153 }
3154
3155 /* Remember the replacement so that the same one can be done
3156 on the REG_NOTES. */
3157 purge_bitfield_addressof_replacements
3158 = gen_rtx_EXPR_LIST (VOIDmode, x,
3159 gen_rtx_EXPR_LIST
3160 (VOIDmode, val,
3161 purge_bitfield_addressof_replacements));
3162
3163 /* We replaced with a reg -- all done. */
3164 return true;
3165 }
3166 }
3167
3168 else if (validate_change (insn, loc, sub, 0))
3169 {
3170 /* Remember the replacement so that the same one can be done
3171 on the REG_NOTES. */
3172 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3173 {
3174 rtx tem;
3175
3176 for (tem = purge_addressof_replacements;
3177 tem != NULL_RTX;
3178 tem = XEXP (XEXP (tem, 1), 1))
3179 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3180 {
3181 XEXP (XEXP (tem, 1), 0) = sub;
3182 return true;
3183 }
3184 purge_addressof_replacements
3185 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3186 gen_rtx_EXPR_LIST (VOIDmode, sub,
3187 purge_addressof_replacements));
3188 return true;
3189 }
3190 goto restart;
3191 }
3192 give_up:;
3193 /* else give up and put it into the stack */
3194 }
3195
3196 else if (code == ADDRESSOF)
3197 {
3198 put_addressof_into_stack (x, ht);
3199 return true;
3200 }
3201 else if (code == SET)
3202 {
3203 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3204 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3205 return result;
3206 }
3207
3208 /* Scan all subexpressions. */
3209 fmt = GET_RTX_FORMAT (code);
3210 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3211 {
3212 if (*fmt == 'e')
3213 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3214 else if (*fmt == 'E')
3215 for (j = 0; j < XVECLEN (x, i); j++)
3216 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3217 }
3218
3219 return result;
3220 }
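
/* For illustration: when nothing actually forced the variable into memory,
   a reference such as (mem:SI (addressof:SI (reg:SI 118) 117)) simply
   collapses back to (reg:SI 118) via validate_change above.  If the MEM has
   a narrower mode, e.g. (mem:QI ...) wrapping an SImode register, the
   bit-field path is used instead: a store becomes a store_bit_field of an
   8-bit field of the register and a load becomes an extract_bit_field,
   preserving the memory semantics without an actual stack slot.  (The
   register numbers are hypothetical.)  */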
3221
3222 /* Return a new hash table entry in HT. */
3223
3224 static struct hash_entry *
3225 insns_for_mem_newfunc (he, ht, k)
3226 struct hash_entry *he;
3227 struct hash_table *ht;
3228 hash_table_key k ATTRIBUTE_UNUSED;
3229 {
3230 struct insns_for_mem_entry *ifmhe;
3231 if (he)
3232 return he;
3233
3234 ifmhe = ((struct insns_for_mem_entry *)
3235 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3236 ifmhe->insns = NULL_RTX;
3237
3238 return &ifmhe->he;
3239 }
3240
3241 /* Return a hash value for K, a REG. */
3242
3243 static unsigned long
3244 insns_for_mem_hash (k)
3245 hash_table_key k;
3246 {
3247 /* K is really an RTX. Just use its address as the hash value. */
3248 return (unsigned long) k;
3249 }
3250
3251 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3252
3253 static boolean
3254 insns_for_mem_comp (k1, k2)
3255 hash_table_key k1;
3256 hash_table_key k2;
3257 {
3258 return k1 == k2;
3259 }
3260
3261 struct insns_for_mem_walk_info {
3262 /* The hash table that we are using to record which INSNs use which
3263 MEMs. */
3264 struct hash_table *ht;
3265
3266 /* The INSN we are currently processing. */
3267 rtx insn;
3268
3269 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3270 to find the insns that use the REGs in the ADDRESSOFs. */
3271 int pass;
3272 };
3273
3274 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3275 that might be used in an ADDRESSOF expression, record this INSN in
3276 the hash table given by DATA (which is really a pointer to an
3277 insns_for_mem_walk_info structure). */
3278
3279 static int
3280 insns_for_mem_walk (r, data)
3281 rtx *r;
3282 void *data;
3283 {
3284 struct insns_for_mem_walk_info *ifmwi
3285 = (struct insns_for_mem_walk_info *) data;
3286
3287 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3288 && GET_CODE (XEXP (*r, 0)) == REG)
3289 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3290 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3291 {
3292 /* Look up this REG in the hash table; it is only present if pass 0
3292 saw it inside an ADDRESSOF. */
3293 struct insns_for_mem_entry *ifme
3294 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3295 *r,
3296 /*create=*/0,
3297 /*copy=*/0);
3298
3299 /* If we have not already recorded this INSN, do so now. Since
3300 we process the INSNs in order, we know that if we have
3301 recorded it, it must be at the front of the list. */
3302 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3303 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3304 ifme->insns);
3305 }
3306
3307 return 0;
3308 }
3309
3310 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3311 which REGs in HT. */
3312
3313 static void
3314 compute_insns_for_mem (insns, last_insn, ht)
3315 rtx insns;
3316 rtx last_insn;
3317 struct hash_table *ht;
3318 {
3319 rtx insn;
3320 struct insns_for_mem_walk_info ifmwi;
3321 ifmwi.ht = ht;
3322
3323 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3324 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3325 if (INSN_P (insn))
3326 {
3327 ifmwi.insn = insn;
3328 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3329 }
3330 }
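
/* For illustration: after the two passes above, looking up the REG found
   inside a given ADDRESSOF yields an insns_for_mem_entry whose `insns'
   field is an EXPR_LIST of every insn that mentions that REG, with the most
   recently scanned insn at the front, e.g. (insn 42 . (insn 17 . nil)).
   fixup_var_refs_insns uses this list to visit only the relevant insns
   instead of walking the whole chain.  */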
3331
3332 /* Helper function for purge_addressof called through for_each_rtx.
3333 Returns true iff the rtl is an ADDRESSOF. */
3334 static int
3335 is_addressof (rtl, data)
3336 rtx *rtl;
3337 void *data ATTRIBUTE_UNUSED;
3338 {
3339 return GET_CODE (*rtl) == ADDRESSOF;
3340 }
3341
3342 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3343 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3344 stack. */
3345
3346 void
3347 purge_addressof (insns)
3348 rtx insns;
3349 {
3350 rtx insn;
3351 struct hash_table ht;
3352
3353 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3354 requires a fixup pass over the instruction stream to correct
3355 INSNs that depended on the REG being a REG, and not a MEM. But,
3356 these fixup passes are slow. Furthermore, most MEMs are not
3357 mentioned in very many instructions. So, we speed up the process
3358 by pre-calculating which REGs occur in which INSNs; that allows
3359 us to perform the fixup passes much more quickly. */
3360 hash_table_init (&ht,
3361 insns_for_mem_newfunc,
3362 insns_for_mem_hash,
3363 insns_for_mem_comp);
3364 compute_insns_for_mem (insns, NULL_RTX, &ht);
3365
3366 for (insn = insns; insn; insn = NEXT_INSN (insn))
3367 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3368 || GET_CODE (insn) == CALL_INSN)
3369 {
3370 if (! purge_addressof_1 (&PATTERN (insn), insn,
3371 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3372 /* If we could not replace the ADDRESSOFs in the insn,
3373 something is wrong. */
3374 abort ();
3375
3376 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3377 {
3378 /* If we could not replace the ADDRESSOFs in the insn's notes,
3379 we can just remove the offending notes instead. */
3380 rtx note;
3381
3382 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3383 {
3384 /* If we find a REG_RETVAL note then the insn is a libcall.
3385 Such insns must have REG_EQUAL notes as well, in order
3386 for later passes of the compiler to work. So it is not
3387 safe to delete the notes here, and instead we abort. */
3388 if (REG_NOTE_KIND (note) == REG_RETVAL)
3389 abort ();
3390 if (for_each_rtx (&note, is_addressof, NULL))
3391 remove_note (insn, note);
3392 }
3393 }
3394 }
3395
3396 /* Clean up. */
3397 hash_table_free (&ht);
3398 purge_bitfield_addressof_replacements = 0;
3399 purge_addressof_replacements = 0;
3400
3401 /* REGs are shared. purge_addressof will destructively replace a REG
3402 with a MEM, which creates shared MEMs.
3403
3404 Unfortunately, the children of put_reg_into_stack assume that MEMs
3405 referring to the same stack slot are shared (fixup_var_refs and
3406 the associated hash table code).
3407
3408 So, we have to do another unsharing pass after we have flushed any
3409 REGs that had their address taken into the stack.
3410
3411 It may be worth tracking whether or not we converted any REGs into
3412 MEMs to avoid this overhead when it is not needed. */
3413 unshare_all_rtl_again (get_insns ());
3414 }
3415 \f
3416 /* Convert a SET of a hard subreg to a set of the appropriate hard
3417 register. A subroutine of purge_hard_subreg_sets. */
3418
3419 static void
3420 purge_single_hard_subreg_set (pattern)
3421 rtx pattern;
3422 {
3423 rtx reg = SET_DEST (pattern);
3424 enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3425 int word = 0;
3426
3427 while (GET_CODE (reg) == SUBREG)
3428 {
3429 word += SUBREG_WORD (reg);
3430 reg = SUBREG_REG (reg);
3431 }
3432
3433 if (REGNO (reg) < FIRST_PSEUDO_REGISTER)
3434 {
3435 reg = gen_rtx_REG (mode, REGNO (reg) + word);
3436 SET_DEST (pattern) = reg;
3437 }
3438 }
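
/* For illustration, with hypothetical hard registers 0 and 1 forming a
   DImode pair, the pattern

	(set (subreg:SI (reg:DI 0) 1) (reg:SI 130))

   is rewritten by purge_single_hard_subreg_set into

	(set (reg:SI 1) (reg:SI 130))

   since SUBREG_WORD is simply added to the hard register number here.  */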
3439
3440 /* Eliminate all occurrences of SETs of hard subregs from INSNS. The
3441 only such SETs that we expect to see are those left in because
3442 integrate can't handle sets of parts of a return value register.
3443
3444 We don't use alter_subreg because we only want to eliminate subregs
3445 of hard registers. */
3446
3447 void
3448 purge_hard_subreg_sets (insn)
3449 rtx insn;
3450 {
3451 for (; insn; insn = NEXT_INSN (insn))
3452 {
3453 if (INSN_P (insn))
3454 {
3455 rtx pattern = PATTERN (insn);
3456 switch (GET_CODE (pattern))
3457 {
3458 case SET:
3459 if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3460 purge_single_hard_subreg_set (pattern);
3461 break;
3462 case PARALLEL:
3463 {
3464 int j;
3465 for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3466 {
3467 rtx inner_pattern = XVECEXP (pattern, 0, j);
3468 if (GET_CODE (inner_pattern) == SET
3469 && GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3470 purge_single_hard_subreg_set (inner_pattern);
3471 }
3472 }
3473 break;
3474 default:
3475 break;
3476 }
3477 }
3478 }
3479 }
3480 \f
3481 /* Pass through the INSNS of function FNDECL and convert virtual register
3482 references to hard register references. */
3483
3484 void
3485 instantiate_virtual_regs (fndecl, insns)
3486 tree fndecl;
3487 rtx insns;
3488 {
3489 rtx insn;
3490 unsigned int i;
3491
3492 /* Compute the offsets to use for this function. */
3493 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3494 var_offset = STARTING_FRAME_OFFSET;
3495 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3496 out_arg_offset = STACK_POINTER_OFFSET;
3497 cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3498
3499 /* Scan all variables and parameters of this function. For each that is
3500 in memory, instantiate all virtual registers if the result is a valid
3501 address. If not, we do it later. That will handle most uses of virtual
3502 regs on many machines. */
3503 instantiate_decls (fndecl, 1);
3504
3505 /* Initialize recognition, indicating that volatile is OK. */
3506 init_recog ();
3507
3508 /* Scan through all the insns, instantiating every virtual register still
3509 present. */
3510 for (insn = insns; insn; insn = NEXT_INSN (insn))
3511 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3512 || GET_CODE (insn) == CALL_INSN)
3513 {
3514 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3515 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3516 }
3517
3518 /* Instantiate the stack slots for the parm registers, for later use in
3519 addressof elimination. */
3520 for (i = 0; i < max_parm_reg; ++i)
3521 if (parm_reg_stack_loc[i])
3522 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3523
3524 /* Now instantiate the remaining register equivalences for debugging info.
3525 These will not be valid addresses. */
3526 instantiate_decls (fndecl, 0);
3527
3528 /* Indicate that, from now on, assign_stack_local should use
3529 frame_pointer_rtx. */
3530 virtuals_instantiated = 1;
3531 }
3532
3533 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3534 all virtual registers in their DECL_RTL's.
3535
3536 If VALID_ONLY, do this only if the resulting address is still valid.
3537 Otherwise, always do it. */
3538
3539 static void
3540 instantiate_decls (fndecl, valid_only)
3541 tree fndecl;
3542 int valid_only;
3543 {
3544 tree decl;
3545
3546 /* Process all parameters of the function. */
3547 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3548 {
3549 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3550
3551 instantiate_decl (DECL_RTL (decl), size, valid_only);
3552
3553 /* If the parameter was promoted, then the incoming RTL mode may be
3554 larger than the declared type size. We must use the larger of
3555 the two sizes. */
3556 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3557 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3558 }
3559
3560 /* Now process all variables defined in the function or its subblocks. */
3561 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3562 }
3563
3564 /* Subroutine of instantiate_decls: Process all decls in the given
3565 BLOCK node and all its subblocks. */
3566
3567 static void
3568 instantiate_decls_1 (let, valid_only)
3569 tree let;
3570 int valid_only;
3571 {
3572 tree t;
3573
3574 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3575 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3576 valid_only);
3577
3578 /* Process all subblocks. */
3579 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3580 instantiate_decls_1 (t, valid_only);
3581 }
3582
3583 /* Subroutine of the preceding procedures: Given RTL representing a
3584 decl and the size of the object, do any instantiation required.
3585
3586 If VALID_ONLY is non-zero, it means that the RTL should only be
3587 changed if the new address is valid. */
3588
3589 static void
3590 instantiate_decl (x, size, valid_only)
3591 rtx x;
3592 HOST_WIDE_INT size;
3593 int valid_only;
3594 {
3595 enum machine_mode mode;
3596 rtx addr;
3597
3598 /* If this is not a MEM, no need to do anything. Similarly if the
3599 address is a constant or a register that is not a virtual register. */
3600
3601 if (x == 0 || GET_CODE (x) != MEM)
3602 return;
3603
3604 addr = XEXP (x, 0);
3605 if (CONSTANT_P (addr)
3606 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3607 || (GET_CODE (addr) == REG
3608 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3609 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3610 return;
3611
3612 /* If we should only do this if the address is valid, copy the address.
3613 We need to do this so we can undo any changes that might make the
3614 address invalid. This copy is unfortunate, but probably can't be
3615 avoided. */
3616
3617 if (valid_only)
3618 addr = copy_rtx (addr);
3619
3620 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3621
3622 if (valid_only && size >= 0)
3623 {
3624 unsigned HOST_WIDE_INT decl_size = size;
3625
3626 /* Now verify that the resulting address is valid for every integer or
3627 floating-point mode up to and including SIZE bytes long. We do this
3628 since the object might be accessed in any mode and frame addresses
3629 are shared. */
3630
3631 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3632 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3633 mode = GET_MODE_WIDER_MODE (mode))
3634 if (! memory_address_p (mode, addr))
3635 return;
3636
3637 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3638 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3639 mode = GET_MODE_WIDER_MODE (mode))
3640 if (! memory_address_p (mode, addr))
3641 return;
3642 }
3643
3644 /* Put back the address now that we have updated it and we either know
3645 it is valid or we don't care whether it is valid. */
3646
3647 XEXP (x, 0) = addr;
3648 }
3649 \f
3650 /* Given a pointer to a piece of rtx and an optional pointer to the
3651 containing object, instantiate any virtual registers present in it.
3652
3653 If EXTRA_INSNS, we always do the replacement and generate
3654 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
3655 is not valid.
3656
3657 Return 1 if we either had nothing to do or if we were able to do the
3658 needed replacement. Return 0 otherwise; we only return zero if
3659 EXTRA_INSNS is zero.
3660
3661 We first try some simple transformations to avoid the creation of extra
3662 pseudos. */
3663
3664 static int
3665 instantiate_virtual_regs_1 (loc, object, extra_insns)
3666 rtx *loc;
3667 rtx object;
3668 int extra_insns;
3669 {
3670 rtx x;
3671 RTX_CODE code;
3672 rtx new = 0;
3673 HOST_WIDE_INT offset = 0;
3674 rtx temp;
3675 rtx seq;
3676 int i, j;
3677 const char *fmt;
3678
3679 /* Re-start here to avoid recursion in common cases. */
3680 restart:
3681
3682 x = *loc;
3683 if (x == 0)
3684 return 1;
3685
3686 code = GET_CODE (x);
3687
3688 /* Check for some special cases. */
3689 switch (code)
3690 {
3691 case CONST_INT:
3692 case CONST_DOUBLE:
3693 case CONST:
3694 case SYMBOL_REF:
3695 case CODE_LABEL:
3696 case PC:
3697 case CC0:
3698 case ASM_INPUT:
3699 case ADDR_VEC:
3700 case ADDR_DIFF_VEC:
3701 case RETURN:
3702 return 1;
3703
3704 case SET:
3705 /* We are allowed to set the virtual registers. This means that
3706 the actual register should receive the source minus the
3707 appropriate offset. This is used, for example, in the handling
3708 of non-local gotos. */
3709 if (SET_DEST (x) == virtual_incoming_args_rtx)
3710 new = arg_pointer_rtx, offset = -in_arg_offset;
3711 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3712 new = frame_pointer_rtx, offset = -var_offset;
3713 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3714 new = stack_pointer_rtx, offset = -dynamic_offset;
3715 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3716 new = stack_pointer_rtx, offset = -out_arg_offset;
3717 else if (SET_DEST (x) == virtual_cfa_rtx)
3718 new = arg_pointer_rtx, offset = -cfa_offset;
3719
3720 if (new)
3721 {
3722 rtx src = SET_SRC (x);
3723
3724 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3725
3726 /* The only valid sources here are PLUS or REG. Just do
3727 the simplest possible thing to handle them. */
3728 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3729 abort ();
3730
3731 start_sequence ();
3732 if (GET_CODE (src) != REG)
3733 temp = force_operand (src, NULL_RTX);
3734 else
3735 temp = src;
3736 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3737 seq = get_insns ();
3738 end_sequence ();
3739
3740 emit_insns_before (seq, object);
3741 SET_DEST (x) = new;
3742
3743 if (! validate_change (object, &SET_SRC (x), temp, 0)
3744 || ! extra_insns)
3745 abort ();
3746
3747 return 1;
3748 }
3749
3750 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3751 loc = &SET_SRC (x);
3752 goto restart;
3753
3754 case PLUS:
3755 /* Handle special case of virtual register plus constant. */
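/* For example, with a hypothetical var_offset of 16,
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten as
   (plus (reg frame-pointer) (const_int 24)); if the combined
   constant folds to zero, we try to substitute the bare hard
   register instead.  */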
3756 if (CONSTANT_P (XEXP (x, 1)))
3757 {
3758 rtx old, new_offset;
3759
3760 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3761 if (GET_CODE (XEXP (x, 0)) == PLUS)
3762 {
3763 rtx inner = XEXP (XEXP (x, 0), 0);
3764
3765 if (inner == virtual_incoming_args_rtx)
3766 new = arg_pointer_rtx, offset = in_arg_offset;
3767 else if (inner == virtual_stack_vars_rtx)
3768 new = frame_pointer_rtx, offset = var_offset;
3769 else if (inner == virtual_stack_dynamic_rtx)
3770 new = stack_pointer_rtx, offset = dynamic_offset;
3771 else if (inner == virtual_outgoing_args_rtx)
3772 new = stack_pointer_rtx, offset = out_arg_offset;
3773 else if (inner == virtual_cfa_rtx)
3774 new = arg_pointer_rtx, offset = cfa_offset;
3775 else
3776 {
3777 loc = &XEXP (x, 0);
3778 goto restart;
3779 }
3780
3781 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3782 extra_insns);
3783 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3784 }
3785
3786 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3787 new = arg_pointer_rtx, offset = in_arg_offset;
3788 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3789 new = frame_pointer_rtx, offset = var_offset;
3790 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3791 new = stack_pointer_rtx, offset = dynamic_offset;
3792 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3793 new = stack_pointer_rtx, offset = out_arg_offset;
3794 else if (XEXP (x, 0) == virtual_cfa_rtx)
3795 new = arg_pointer_rtx, offset = cfa_offset;
3796 else
3797 {
3798 /* We know the second operand is a constant. Unless the
3799 first operand is a REG (which has already been checked),
3800 it needs to be checked. */
3801 if (GET_CODE (XEXP (x, 0)) != REG)
3802 {
3803 loc = &XEXP (x, 0);
3804 goto restart;
3805 }
3806 return 1;
3807 }
3808
3809 new_offset = plus_constant (XEXP (x, 1), offset);
3810
3811 /* If the new constant is zero, try to replace the sum with just
3812 the register. */
3813 if (new_offset == const0_rtx
3814 && validate_change (object, loc, new, 0))
3815 return 1;
3816
3817 /* Next try to replace the register and new offset.
3818 There are two changes to validate here and we can't assume that
3819 in the case of old offset equals new just changing the register
3820 will yield a valid insn. In the interests of a little efficiency,
3821 however, we only call validate change once (we don't queue up the
3822 changes and then call apply_change_group). */
3823
3824 old = XEXP (x, 0);
3825 if (offset == 0
3826 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3827 : (XEXP (x, 0) = new,
3828 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3829 {
3830 if (! extra_insns)
3831 {
3832 XEXP (x, 0) = old;
3833 return 0;
3834 }
3835
3836 /* Otherwise copy the new constant into a register and replace
3837 constant with that register. */
3838 temp = gen_reg_rtx (Pmode);
3839 XEXP (x, 0) = new;
3840 if (validate_change (object, &XEXP (x, 1), temp, 0))
3841 emit_insn_before (gen_move_insn (temp, new_offset), object);
3842 else
3843 {
3844 /* If that didn't work, replace this expression with a
3845 register containing the sum. */
3846
3847 XEXP (x, 0) = old;
3848 new = gen_rtx_PLUS (Pmode, new, new_offset);
3849
3850 start_sequence ();
3851 temp = force_operand (new, NULL_RTX);
3852 seq = get_insns ();
3853 end_sequence ();
3854
3855 emit_insns_before (seq, object);
3856 if (! validate_change (object, loc, temp, 0)
3857 && ! validate_replace_rtx (x, temp, object))
3858 abort ();
3859 }
3860 }
3861
3862 return 1;
3863 }
3864
3865 /* Fall through to generic two-operand expression case. */
3866 case EXPR_LIST:
3867 case CALL:
3868 case COMPARE:
3869 case MINUS:
3870 case MULT:
3871 case DIV: case UDIV:
3872 case MOD: case UMOD:
3873 case AND: case IOR: case XOR:
3874 case ROTATERT: case ROTATE:
3875 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3876 case NE: case EQ:
3877 case GE: case GT: case GEU: case GTU:
3878 case LE: case LT: case LEU: case LTU:
3879 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3880 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3881 loc = &XEXP (x, 0);
3882 goto restart;
3883
3884 case MEM:
3885 /* Most cases of MEM that convert to valid addresses have already been
3886 handled by our scan of decls. The only special handling we
3887 need here is to make a copy of the rtx to ensure it isn't being
3888 shared if we have to change it to a pseudo.
3889
3890 If the rtx is a simple reference to an address via a virtual register,
3891 it can potentially be shared. In such cases, first try to make it
3892 a valid address, which can also be shared. Otherwise, copy it and
3893 proceed normally.
3894
3895 First check for common cases that need no processing. These are
3896 usually due to instantiation already being done on a previous instance
3897 of a shared rtx. */
3898
3899 temp = XEXP (x, 0);
3900 if (CONSTANT_ADDRESS_P (temp)
3901 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3902 || temp == arg_pointer_rtx
3903 #endif
3904 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3905 || temp == hard_frame_pointer_rtx
3906 #endif
3907 || temp == frame_pointer_rtx)
3908 return 1;
3909
3910 if (GET_CODE (temp) == PLUS
3911 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3912 && (XEXP (temp, 0) == frame_pointer_rtx
3913 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3914 || XEXP (temp, 0) == hard_frame_pointer_rtx
3915 #endif
3916 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3917 || XEXP (temp, 0) == arg_pointer_rtx
3918 #endif
3919 ))
3920 return 1;
3921
3922 if (temp == virtual_stack_vars_rtx
3923 || temp == virtual_incoming_args_rtx
3924 || (GET_CODE (temp) == PLUS
3925 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3926 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3927 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3928 {
3929 /* This MEM may be shared. If the substitution can be done without
3930 the need to generate new pseudos, we want to do it in place
3931 so all copies of the shared rtx benefit. The call below will
3932 only make substitutions if the resulting address is still
3933 valid.
3934
3935 Note that we cannot pass X as the object in the recursive call
3936 since the insn being processed may not allow all valid
3937 addresses. However, if we were not passed an object, we can
3938 only modify X without copying it if X will have a valid
3939 address.
3940
3941 ??? Also note that this can still lose if OBJECT is an insn that
3942 has fewer restrictions on an address than some other insn.
3943 In that case, we will modify the shared address. This case
3944 doesn't seem very likely, though. One case where this could
3945 happen is in the case of a USE or CLOBBER reference, but we
3946 take care of that below. */
3947
3948 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3949 object ? object : x, 0))
3950 return 1;
3951
3952 /* Otherwise make a copy and process that copy. We copy the entire
3953 RTL expression since it might be a PLUS which could also be
3954 shared. */
3955 *loc = x = copy_rtx (x);
3956 }
3957
3958 /* Fall through to generic unary operation case. */
3959 case SUBREG:
3960 case STRICT_LOW_PART:
3961 case NEG: case NOT:
3962 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3963 case SIGN_EXTEND: case ZERO_EXTEND:
3964 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3965 case FLOAT: case FIX:
3966 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3967 case ABS:
3968 case SQRT:
3969 case FFS:
3970 /* These cases either have just one operand or we know that we need not
3971 check the rest of the operands. */
3972 loc = &XEXP (x, 0);
3973 goto restart;
3974
3975 case USE:
3976 case CLOBBER:
3977 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3978 go ahead and make the invalid one, but do it to a copy. For a REG,
3979 just make the recursive call, since there's no chance of a problem. */
3980
3981 if ((GET_CODE (XEXP (x, 0)) == MEM
3982 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3983 0))
3984 || (GET_CODE (XEXP (x, 0)) == REG
3985 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3986 return 1;
3987
3988 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3989 loc = &XEXP (x, 0);
3990 goto restart;
3991
3992 case REG:
3993 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3994 in front of this insn and substitute the temporary. */
3995 if (x == virtual_incoming_args_rtx)
3996 new = arg_pointer_rtx, offset = in_arg_offset;
3997 else if (x == virtual_stack_vars_rtx)
3998 new = frame_pointer_rtx, offset = var_offset;
3999 else if (x == virtual_stack_dynamic_rtx)
4000 new = stack_pointer_rtx, offset = dynamic_offset;
4001 else if (x == virtual_outgoing_args_rtx)
4002 new = stack_pointer_rtx, offset = out_arg_offset;
4003 else if (x == virtual_cfa_rtx)
4004 new = arg_pointer_rtx, offset = cfa_offset;
4005
4006 if (new)
4007 {
4008 temp = plus_constant (new, offset);
4009 if (!validate_change (object, loc, temp, 0))
4010 {
4011 if (! extra_insns)
4012 return 0;
4013
4014 start_sequence ();
4015 temp = force_operand (temp, NULL_RTX);
4016 seq = get_insns ();
4017 end_sequence ();
4018
4019 emit_insns_before (seq, object);
4020 if (! validate_change (object, loc, temp, 0)
4021 && ! validate_replace_rtx (x, temp, object))
4022 abort ();
4023 }
4024 }
4025
4026 return 1;
4027
4028 case ADDRESSOF:
4029 if (GET_CODE (XEXP (x, 0)) == REG)
4030 return 1;
4031
4032 else if (GET_CODE (XEXP (x, 0)) == MEM)
4033 {
4034 /* If we have a (addressof (mem ..)), do any instantiation inside
4035 since we know we'll be making the inside valid when we finally
4036 remove the ADDRESSOF. */
4037 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4038 return 1;
4039 }
4040 break;
4041
4042 default:
4043 break;
4044 }
4045
4046 /* Scan all subexpressions. */
4047 fmt = GET_RTX_FORMAT (code);
4048 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4049 if (*fmt == 'e')
4050 {
4051 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4052 return 0;
4053 }
4054 else if (*fmt == 'E')
4055 for (j = 0; j < XVECLEN (x, i); j++)
4056 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4057 extra_insns))
4058 return 0;
4059
4060 return 1;
4061 }
4062 \f
4063 /* Optimization: assuming this function does not receive nonlocal gotos,
4064 delete the handlers for such, as well as the insns to establish
4065 and disestablish them. */
4066
4067 static void
4068 delete_handlers ()
4069 {
4070 rtx insn;
4071 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4072 {
4073 /* Delete the handler by turning off the flag that would
4074 prevent jump_optimize from deleting it.
4075 Also permit deletion of the nonlocal labels themselves
4076 if nothing local refers to them. */
4077 if (GET_CODE (insn) == CODE_LABEL)
4078 {
4079 tree t, last_t;
4080
4081 LABEL_PRESERVE_P (insn) = 0;
4082
4083 /* Remove it from the nonlocal_label list, to avoid confusing
4084 flow. */
4085 for (t = nonlocal_labels, last_t = 0; t;
4086 last_t = t, t = TREE_CHAIN (t))
4087 if (DECL_RTL (TREE_VALUE (t)) == insn)
4088 break;
4089 if (t)
4090 {
4091 if (! last_t)
4092 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
4093 else
4094 TREE_CHAIN (last_t) = TREE_CHAIN (t);
4095 }
4096 }
4097 if (GET_CODE (insn) == INSN)
4098 {
4099 int can_delete = 0;
4100 rtx t;
4101 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
4102 if (reg_mentioned_p (t, PATTERN (insn)))
4103 {
4104 can_delete = 1;
4105 break;
4106 }
4107 if (can_delete
4108 || (nonlocal_goto_stack_level != 0
4109 && reg_mentioned_p (nonlocal_goto_stack_level,
4110 PATTERN (insn))))
4111 delete_insn (insn);
4112 }
4113 }
4114 }
4115 \f
4116 int
4117 max_parm_reg_num ()
4118 {
4119 return max_parm_reg;
4120 }
4121
4122 /* Return the first insn following those generated by `assign_parms'. */
4123
4124 rtx
4125 get_first_nonparm_insn ()
4126 {
4127 if (last_parm_insn)
4128 return NEXT_INSN (last_parm_insn);
4129 return get_insns ();
4130 }
4131
4132 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4133 Crash if there is none. */
4134
4135 rtx
4136 get_first_block_beg ()
4137 {
4138 register rtx searcher;
4139 register rtx insn = get_first_nonparm_insn ();
4140
4141 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4142 if (GET_CODE (searcher) == NOTE
4143 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4144 return searcher;
4145
4146 abort (); /* Invalid call to this function. (See comments above.) */
4147 return NULL_RTX;
4148 }
4149
4150 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4151 This means a type for which function calls must pass an address to the
4152 function or get an address back from the function.
4153 EXP may be a type node or an expression (whose type is tested). */
4154
4155 int
4156 aggregate_value_p (exp)
4157 tree exp;
4158 {
4159 int i, regno, nregs;
4160 rtx reg;
4161
4162 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4163
4164 if (TREE_CODE (type) == VOID_TYPE)
4165 return 0;
4166 if (RETURN_IN_MEMORY (type))
4167 return 1;
4168 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4169 and thus can't be returned in registers. */
4170 if (TREE_ADDRESSABLE (type))
4171 return 1;
4172 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4173 return 1;
4174 /* Make sure we have suitable call-clobbered regs to return
4175 the value in; if not, we must return it in memory. */
4176 reg = hard_function_value (type, 0, 0);
4177
4178 /* If we have something other than a REG (e.g. a PARALLEL), then assume
4179 it is OK. */
4180 if (GET_CODE (reg) != REG)
4181 return 0;
4182
4183 regno = REGNO (reg);
4184 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4185 for (i = 0; i < nregs; i++)
4186 if (! call_used_regs[regno + i])
4187 return 1;
4188 return 0;
4189 }
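/* For example, a value that needs two registers is forced into memory
   whenever either register in that span is call-saved on the target.  */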
4190 \f
4191 /* Assign RTL expressions to the function's parameters.
4192 This may involve copying them into registers and using
4193 those registers as the RTL for them. */
4194
4195 void
4196 assign_parms (fndecl)
4197 tree fndecl;
4198 {
4199 register tree parm;
4200 register rtx entry_parm = 0;
4201 register rtx stack_parm = 0;
4202 CUMULATIVE_ARGS args_so_far;
4203 enum machine_mode promoted_mode, passed_mode;
4204 enum machine_mode nominal_mode, promoted_nominal_mode;
4205 int unsignedp;
4206 /* Total space needed so far for args on the stack,
4207 given as a constant and a tree-expression. */
4208 struct args_size stack_args_size;
4209 tree fntype = TREE_TYPE (fndecl);
4210 tree fnargs = DECL_ARGUMENTS (fndecl);
4211 /* This is used for the arg pointer when referring to stack args. */
4212 rtx internal_arg_pointer;
4213 /* This is a dummy PARM_DECL that we use for the function result if
4214 the function returns a structure. */
4215 tree function_result_decl = 0;
4216 #ifdef SETUP_INCOMING_VARARGS
4217 int varargs_setup = 0;
4218 #endif
4219 rtx conversion_insns = 0;
4220 struct args_size alignment_pad;
4221
4222 /* Nonzero if the last arg is named `__builtin_va_alist',
4223 which is used on some machines for old-fashioned non-ANSI varargs.h;
4224 this should be stuck onto the stack as if it had arrived there. */
4225 int hide_last_arg
4226 = (current_function_varargs
4227 && fnargs
4228 && (parm = tree_last (fnargs)) != 0
4229 && DECL_NAME (parm)
4230 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4231 "__builtin_va_alist")));
4232
4233 /* Nonzero if function takes extra anonymous args.
4234 This means the last named arg must be on the stack
4235 right before the anonymous ones. */
4236 int stdarg
4237 = (TYPE_ARG_TYPES (fntype) != 0
4238 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4239 != void_type_node));
4240
4241 current_function_stdarg = stdarg;
4242
4243 /* If the reg that the virtual arg pointer will be translated into is
4244 not a fixed reg or is the stack pointer, make a copy of the virtual
4245 arg pointer, and address parms via the copy. The frame pointer is
4246 considered fixed even though it is not marked as such.
4247
4248 The second time through, simply use ap to avoid generating rtx. */
4249
4250 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4251 || ! (fixed_regs[ARG_POINTER_REGNUM]
4252 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4253 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4254 else
4255 internal_arg_pointer = virtual_incoming_args_rtx;
4256 current_function_internal_arg_pointer = internal_arg_pointer;
4257
4258 stack_args_size.constant = 0;
4259 stack_args_size.var = 0;
4260
4261 /* If struct value address is treated as the first argument, make it so. */
4262 if (aggregate_value_p (DECL_RESULT (fndecl))
4263 && ! current_function_returns_pcc_struct
4264 && struct_value_incoming_rtx == 0)
4265 {
4266 tree type = build_pointer_type (TREE_TYPE (fntype));
4267
4268 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4269
4270 DECL_ARG_TYPE (function_result_decl) = type;
4271 TREE_CHAIN (function_result_decl) = fnargs;
4272 fnargs = function_result_decl;
4273 }
4274
4275 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4276 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4277
4278 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4279 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4280 #else
4281 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4282 #endif
4283
4284 /* We haven't yet found an argument that we must push and pretend the
4285 caller did. */
4286 current_function_pretend_args_size = 0;
4287
4288 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4289 {
4290 struct args_size stack_offset;
4291 struct args_size arg_size;
4292 int passed_pointer = 0;
4293 int did_conversion = 0;
4294 tree passed_type = DECL_ARG_TYPE (parm);
4295 tree nominal_type = TREE_TYPE (parm);
4296 int pretend_named;
4297
4298 /* Set LAST_NAMED if this is last named arg before some
4299 anonymous args. */
4300 int last_named = ((TREE_CHAIN (parm) == 0
4301 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4302 && (stdarg || current_function_varargs));
4303 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4304 most machines, if this is a varargs/stdarg function, then we treat
4305 the last named arg as if it were anonymous too. */
4306 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4307
4308 if (TREE_TYPE (parm) == error_mark_node
4309 /* This can happen after weird syntax errors
4310 or if an enum type is defined among the parms. */
4311 || TREE_CODE (parm) != PARM_DECL
4312 || passed_type == NULL)
4313 {
4314 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4315 = gen_rtx_MEM (BLKmode, const0_rtx);
4316 TREE_USED (parm) = 1;
4317 continue;
4318 }
4319
4320 /* For a varargs.h function, save info about regs and stack space
4321 used by the individual args, not including the va_alist arg. */
4322 if (hide_last_arg && last_named)
4323 current_function_args_info = args_so_far;
4324
4325 /* Find mode of arg as it is passed, and mode of arg
4326 as it should be during execution of this function. */
4327 passed_mode = TYPE_MODE (passed_type);
4328 nominal_mode = TYPE_MODE (nominal_type);
4329
4330 /* If the parm's mode is VOID, its value doesn't matter,
4331 so avoid the usual things like emit_move_insn that could crash. */
4332 if (nominal_mode == VOIDmode)
4333 {
4334 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4335 continue;
4336 }
4337
4338 /* If the parm is to be passed as a transparent union, use the
4339 type of the first field for the tests below. We have already
4340 verified that the modes are the same. */
4341 if (DECL_TRANSPARENT_UNION (parm)
4342 || (TREE_CODE (passed_type) == UNION_TYPE
4343 && TYPE_TRANSPARENT_UNION (passed_type)))
4344 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4345
4346 /* See if this arg was passed by invisible reference. It is if
4347 it is an object whose size depends on the contents of the
4348 object itself or if the machine requires these objects be passed
4349 that way. */
4350
4351 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4352 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4353 || TREE_ADDRESSABLE (passed_type)
4354 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4355 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4356 passed_type, named_arg)
4357 #endif
4358 )
4359 {
4360 passed_type = nominal_type = build_pointer_type (passed_type);
4361 passed_pointer = 1;
4362 passed_mode = nominal_mode = Pmode;
4363 }
4364
4365 promoted_mode = passed_mode;
4366
4367 #ifdef PROMOTE_FUNCTION_ARGS
4368 /* Compute the mode to which the arg is actually extended. */
4369 unsignedp = TREE_UNSIGNED (passed_type);
4370 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4371 #endif
4372
4373 /* Let machine desc say which reg (if any) the parm arrives in.
4374 0 means it arrives on the stack. */
4375 #ifdef FUNCTION_INCOMING_ARG
4376 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4377 passed_type, named_arg);
4378 #else
4379 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4380 passed_type, named_arg);
4381 #endif
4382
4383 if (entry_parm == 0)
4384 promoted_mode = passed_mode;
4385
4386 #ifdef SETUP_INCOMING_VARARGS
4387 /* If this is the last named parameter, do any required setup for
4388 varargs or stdargs. We need to know about the case of this being an
4389 addressable type, in which case we skip the registers it
4390 would have arrived in.
4391
4392 For stdargs, LAST_NAMED will be set for two parameters, the one that
4393 is actually the last named, and the dummy parameter. We only
4394 want to do this action once.
4395
4396 Also, indicate when RTL generation is to be suppressed. */
4397 if (last_named && !varargs_setup)
4398 {
4399 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4400 current_function_pretend_args_size, 0);
4401 varargs_setup = 1;
4402 }
4403 #endif
4404
4405 /* Determine parm's home in the stack,
4406 in case it arrives in the stack or we should pretend it did.
4407
4408 Compute the stack position and rtx where the argument arrives
4409 and its size.
4410
4411 There is one complexity here: If this was a parameter that would
4412 have been passed in registers, but wasn't, only because it is
4413 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4414 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4415 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4416 0 as it was the previous time. */
4417
4418 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4419 locate_and_pad_parm (promoted_mode, passed_type,
4420 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4421 1,
4422 #else
4423 #ifdef FUNCTION_INCOMING_ARG
4424 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4425 passed_type,
4426 pretend_named) != 0,
4427 #else
4428 FUNCTION_ARG (args_so_far, promoted_mode,
4429 passed_type,
4430 pretend_named) != 0,
4431 #endif
4432 #endif
4433 fndecl, &stack_args_size, &stack_offset, &arg_size,
4434 &alignment_pad);
4435
4436 {
4437 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4438
4439 if (offset_rtx == const0_rtx)
4440 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4441 else
4442 stack_parm = gen_rtx_MEM (promoted_mode,
4443 gen_rtx_PLUS (Pmode,
4444 internal_arg_pointer,
4445 offset_rtx));
4446
4447 set_mem_attributes (stack_parm, parm, 1);
4448 }
4449
4450 /* If this parameter was passed both in registers and in the stack,
4451 use the copy on the stack. */
4452 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4453 entry_parm = 0;
4454
4455 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4456 /* If this parm was passed part in regs and part in memory,
4457 pretend it arrived entirely in memory
4458 by pushing the register-part onto the stack.
4459
4460 In the special case of a DImode or DFmode that is split,
4461 we could put it together in a pseudoreg directly,
4462 but for now that's not worth bothering with. */
4463
4464 if (entry_parm)
4465 {
4466 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4467 passed_type, named_arg);
4468
4469 if (nregs > 0)
4470 {
4471 current_function_pretend_args_size
4472 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4473 / (PARM_BOUNDARY / BITS_PER_UNIT)
4474 * (PARM_BOUNDARY / BITS_PER_UNIT));
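/* For example, assuming UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64,
   one register's worth (4 bytes) is rounded up to the 8-byte
   parameter boundary: (4 + 8 - 1) / 8 * 8 == 8.  */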
4475
4476 /* Handle calls that pass values in multiple non-contiguous
4477 locations. The Irix 6 ABI has examples of this. */
4478 if (GET_CODE (entry_parm) == PARALLEL)
4479 emit_group_store (validize_mem (stack_parm), entry_parm,
4480 int_size_in_bytes (TREE_TYPE (parm)),
4481 TYPE_ALIGN (TREE_TYPE (parm)));
4482
4483 else
4484 move_block_from_reg (REGNO (entry_parm),
4485 validize_mem (stack_parm), nregs,
4486 int_size_in_bytes (TREE_TYPE (parm)));
4487
4488 entry_parm = stack_parm;
4489 }
4490 }
4491 #endif
4492
4493 /* If we didn't decide this parm came in a register,
4494 by default it came on the stack. */
4495 if (entry_parm == 0)
4496 entry_parm = stack_parm;
4497
4498 /* Record permanently how this parm was passed. */
4499 DECL_INCOMING_RTL (parm) = entry_parm;
4500
4501 /* If there is actually space on the stack for this parm,
4502 count it in stack_args_size; otherwise set stack_parm to 0
4503 to indicate there is no preallocated stack slot for the parm. */
4504
4505 if (entry_parm == stack_parm
4506 || (GET_CODE (entry_parm) == PARALLEL
4507 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4508 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4509 /* On some machines, even if a parm value arrives in a register
4510 there is still an (uninitialized) stack slot allocated for it.
4511
4512 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4513 whether this parameter already has a stack slot allocated,
4514 because an arg block exists only if current_function_args_size
4515 is larger than some threshold, and we haven't calculated that
4516 yet. So, for now, we just assume that stack slots never exist
4517 in this case. */
4518 || REG_PARM_STACK_SPACE (fndecl) > 0
4519 #endif
4520 )
4521 {
4522 stack_args_size.constant += arg_size.constant;
4523 if (arg_size.var)
4524 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4525 }
4526 else
4527 /* No stack slot was pushed for this parm. */
4528 stack_parm = 0;
4529
4530 /* Update info on where next arg arrives in registers. */
4531
4532 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4533 passed_type, named_arg);
4534
4535 /* If we can't trust the parm stack slot to be aligned enough
4536 for its ultimate type, don't use that slot after entry.
4537 We'll make another stack slot, if we need one. */
4538 {
4539 unsigned int thisparm_boundary
4540 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4541
4542 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4543 stack_parm = 0;
4544 }
4545
4546 /* If parm was passed in memory, and we need to convert it on entry,
4547 don't store it back in that same slot. */
4548 if (entry_parm != 0
4549 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4550 stack_parm = 0;
4551
4552 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4553 in the mode in which it arrives.
4554 STACK_PARM is an RTX for a stack slot where the parameter can live
4555 during the function (in case we want to put it there).
4556 STACK_PARM is 0 if no stack slot was pushed for it.
4557
4558 Now output code if necessary to convert ENTRY_PARM to
4559 the type in which this function declares it,
4560 and store that result in an appropriate place,
4561 which may be a pseudo reg, may be STACK_PARM,
4562 or may be a local stack slot if STACK_PARM is 0.
4563
4564 Set DECL_RTL to that place. */
4565
4566 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4567 {
4568 /* If a BLKmode arrives in registers, copy it to a stack slot.
4569 Handle calls that pass values in multiple non-contiguous
4570 locations. The Irix 6 ABI has examples of this. */
4571 if (GET_CODE (entry_parm) == REG
4572 || GET_CODE (entry_parm) == PARALLEL)
4573 {
4574 int size_stored
4575 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4576 UNITS_PER_WORD);
4577
4578 /* Note that we will be storing an integral number of words.
4579 So we have to be careful to ensure that we allocate an
4580 integral number of words. We do this below in the
4581 assign_stack_local if space was not allocated in the argument
4582 list. If it was, this will not work if PARM_BOUNDARY is not
4583 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4584 if it becomes a problem. */
4585
4586 if (stack_parm == 0)
4587 {
4588 stack_parm
4589 = assign_stack_local (GET_MODE (entry_parm),
4590 size_stored, 0);
4591 set_mem_attributes (stack_parm, parm, 1);
4592 }
4593
4594 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4595 abort ();
4596
4597 /* Handle calls that pass values in multiple non-contiguous
4598 locations. The Irix 6 ABI has examples of this. */
4599 if (GET_CODE (entry_parm) == PARALLEL)
4600 emit_group_store (validize_mem (stack_parm), entry_parm,
4601 int_size_in_bytes (TREE_TYPE (parm)),
4602 TYPE_ALIGN (TREE_TYPE (parm)));
4603 else
4604 move_block_from_reg (REGNO (entry_parm),
4605 validize_mem (stack_parm),
4606 size_stored / UNITS_PER_WORD,
4607 int_size_in_bytes (TREE_TYPE (parm)));
4608 }
4609 DECL_RTL (parm) = stack_parm;
4610 }
4611 else if (! ((! optimize
4612 && ! DECL_REGISTER (parm)
4613 && ! DECL_INLINE (fndecl))
4614 || TREE_SIDE_EFFECTS (parm)
4615 /* If -ffloat-store specified, don't put explicit
4616 float variables into registers. */
4617 || (flag_float_store
4618 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4619 /* Always assign pseudo to structure return or item passed
4620 by invisible reference. */
4621 || passed_pointer || parm == function_result_decl)
4622 {
4623 /* Store the parm in a pseudoregister during the function, but we
4624 may need to do it in a wider mode. */
4625
4626 register rtx parmreg;
4627 unsigned int regno, regnoi = 0, regnor = 0;
4628
4629 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4630
4631 promoted_nominal_mode
4632 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4633
4634 parmreg = gen_reg_rtx (promoted_nominal_mode);
4635 mark_user_reg (parmreg);
4636
4637 /* If this was an item that we received a pointer to, set DECL_RTL
4638 appropriately. */
4639 if (passed_pointer)
4640 {
4641 DECL_RTL (parm)
4642 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4643 set_mem_attributes (DECL_RTL (parm), parm, 1);
4644 }
4645 else
4646 DECL_RTL (parm) = parmreg;
4647
4648 /* Copy the value into the register. */
4649 if (nominal_mode != passed_mode
4650 || promoted_nominal_mode != promoted_mode)
4651 {
4652 int save_tree_used;
4653 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4654 mode, by the caller. We now have to convert it to
4655 NOMINAL_MODE, if different. However, PARMREG may be in
4656 a different mode than NOMINAL_MODE if it is being stored
4657 promoted.
4658
4659 If ENTRY_PARM is a hard register, it might be in a register
4660 not valid for operating in its mode (e.g., an odd-numbered
4661 register for a DFmode). In that case, moves are the only
4662 thing valid, so we can't do a convert from there. This
4663 occurs when the calling sequence allows such misaligned
4664 usages.
4665
4666 In addition, the conversion may involve a call, which could
4667 clobber parameters which haven't been copied to pseudo
4668 registers yet. Therefore, we must first copy the parm to
4669 a pseudo reg here, and save the conversion until after all
4670 parameters have been moved. */
4671
4672 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4673
4674 emit_move_insn (tempreg, validize_mem (entry_parm));
4675
4676 push_to_sequence (conversion_insns);
4677 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4678
4679 /* TREE_USED gets set erroneously during expand_assignment. */
4680 save_tree_used = TREE_USED (parm);
4681 expand_assignment (parm,
4682 make_tree (nominal_type, tempreg), 0, 0);
4683 TREE_USED (parm) = save_tree_used;
4684 conversion_insns = get_insns ();
4685 did_conversion = 1;
4686 end_sequence ();
4687 }
4688 else
4689 emit_move_insn (parmreg, validize_mem (entry_parm));
4690
4691 /* If we were passed a pointer but the actual value
4692 can safely live in a register, put it in one. */
4693 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4694 && ! ((! optimize
4695 && ! DECL_REGISTER (parm)
4696 && ! DECL_INLINE (fndecl))
4697 || TREE_SIDE_EFFECTS (parm)
4698 /* If -ffloat-store specified, don't put explicit
4699 float variables into registers. */
4700 || (flag_float_store
4701 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4702 {
4703 /* We can't use nominal_mode, because it will have been set to
4704 Pmode above. We must use the actual mode of the parm. */
4705 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4706 mark_user_reg (parmreg);
4707 emit_move_insn (parmreg, DECL_RTL (parm));
4708 DECL_RTL (parm) = parmreg;
4709 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4710 now the parm. */
4711 stack_parm = 0;
4712 }
4713 #ifdef FUNCTION_ARG_CALLEE_COPIES
4714 /* If we are passed an arg by reference and it is our responsibility
4715 to make a copy, do it now.
4716 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4717 original argument, so we must recreate them in the call to
4718 FUNCTION_ARG_CALLEE_COPIES. */
4719 /* ??? Later add code to handle the case that if the argument isn't
4720 modified, don't do the copy. */
4721
4722 else if (passed_pointer
4723 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4724 TYPE_MODE (DECL_ARG_TYPE (parm)),
4725 DECL_ARG_TYPE (parm),
4726 named_arg)
4727 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4728 {
4729 rtx copy;
4730 tree type = DECL_ARG_TYPE (parm);
4731
4732 /* This sequence may involve a library call perhaps clobbering
4733 registers that haven't been copied to pseudos yet. */
4734
4735 push_to_sequence (conversion_insns);
4736
4737 if (!COMPLETE_TYPE_P (type)
4738 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4739 /* This is a variable sized object. */
4740 copy = gen_rtx_MEM (BLKmode,
4741 allocate_dynamic_stack_space
4742 (expr_size (parm), NULL_RTX,
4743 TYPE_ALIGN (type)));
4744 else
4745 copy = assign_stack_temp (TYPE_MODE (type),
4746 int_size_in_bytes (type), 1);
4747 set_mem_attributes (copy, parm, 1);
4748
4749 store_expr (parm, copy, 0);
4750 emit_move_insn (parmreg, XEXP (copy, 0));
4751 if (current_function_check_memory_usage)
4752 emit_library_call (chkr_set_right_libfunc,
4753 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4754 XEXP (copy, 0), Pmode,
4755 GEN_INT (int_size_in_bytes (type)),
4756 TYPE_MODE (sizetype),
4757 GEN_INT (MEMORY_USE_RW),
4758 TYPE_MODE (integer_type_node));
4759 conversion_insns = get_insns ();
4760 did_conversion = 1;
4761 end_sequence ();
4762 }
4763 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4764
4765 /* In any case, record the parm's desired stack location
4766 in case we later discover it must live in the stack.
4767
4768 If it is a COMPLEX value, store the stack location for both
4769 halves. */
4770
4771 if (GET_CODE (parmreg) == CONCAT)
4772 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4773 else
4774 regno = REGNO (parmreg);
4775
4776 if (regno >= max_parm_reg)
4777 {
4778 rtx *new;
4779 int old_max_parm_reg = max_parm_reg;
4780
4781 /* It's slow to expand this one register at a time,
4782 but it's also rare and we need max_parm_reg to be
4783 precisely correct. */
4784 max_parm_reg = regno + 1;
4785 new = (rtx *) xrealloc (parm_reg_stack_loc,
4786 max_parm_reg * sizeof (rtx));
4787 memset ((char *) (new + old_max_parm_reg), 0,
4788 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4789 parm_reg_stack_loc = new;
4790 }
4791
4792 if (GET_CODE (parmreg) == CONCAT)
4793 {
4794 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4795
4796 regnor = REGNO (gen_realpart (submode, parmreg));
4797 regnoi = REGNO (gen_imagpart (submode, parmreg));
4798
4799 if (stack_parm != 0)
4800 {
4801 parm_reg_stack_loc[regnor]
4802 = gen_realpart (submode, stack_parm);
4803 parm_reg_stack_loc[regnoi]
4804 = gen_imagpart (submode, stack_parm);
4805 }
4806 else
4807 {
4808 parm_reg_stack_loc[regnor] = 0;
4809 parm_reg_stack_loc[regnoi] = 0;
4810 }
4811 }
4812 else
4813 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4814
4815 /* Mark the register as eliminable if we did no conversion
4816 and it was copied from memory at a fixed offset,
4817 and the arg pointer was not copied to a pseudo-reg.
4818 If the arg pointer is a pseudo reg or the offset formed
4819 an invalid address, such memory-equivalences
4820 as we make here would screw up life analysis for it. */
4821 if (nominal_mode == passed_mode
4822 && ! did_conversion
4823 && stack_parm != 0
4824 && GET_CODE (stack_parm) == MEM
4825 && stack_offset.var == 0
4826 && reg_mentioned_p (virtual_incoming_args_rtx,
4827 XEXP (stack_parm, 0)))
4828 {
4829 rtx linsn = get_last_insn ();
4830 rtx sinsn, set;
4831
4832 /* Mark complex types separately. */
4833 if (GET_CODE (parmreg) == CONCAT)
4834 /* Scan backwards for the set of the real and
4835 imaginary parts. */
4836 for (sinsn = linsn; sinsn != 0;
4837 sinsn = prev_nonnote_insn (sinsn))
4838 {
4839 set = single_set (sinsn);
4840 if (set != 0
4841 && SET_DEST (set) == regno_reg_rtx [regnoi])
4842 REG_NOTES (sinsn)
4843 = gen_rtx_EXPR_LIST (REG_EQUIV,
4844 parm_reg_stack_loc[regnoi],
4845 REG_NOTES (sinsn));
4846 else if (set != 0
4847 && SET_DEST (set) == regno_reg_rtx [regnor])
4848 REG_NOTES (sinsn)
4849 = gen_rtx_EXPR_LIST (REG_EQUIV,
4850 parm_reg_stack_loc[regnor],
4851 REG_NOTES (sinsn));
4852 }
4853 else if ((set = single_set (linsn)) != 0
4854 && SET_DEST (set) == parmreg)
4855 REG_NOTES (linsn)
4856 = gen_rtx_EXPR_LIST (REG_EQUIV,
4857 stack_parm, REG_NOTES (linsn));
4858 }
4859
4860 /* For pointer data type, suggest pointer register. */
4861 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4862 mark_reg_pointer (parmreg,
4863 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4864
4865 /* If something wants our address, try to use ADDRESSOF. */
4866 if (TREE_ADDRESSABLE (parm))
4867 {
4868 /* If we end up putting something into the stack,
4869 fixup_var_refs_insns will need to make a pass over
4870 all the instructions. It looks through the pending
4871 sequences -- but it can't see the ones in the
4872 CONVERSION_INSNS, if they're not on the sequence
4873 stack. So, we go back to that sequence, just so that
4874 the fixups will happen. */
4875 push_to_sequence (conversion_insns);
4876 put_var_into_stack (parm);
4877 conversion_insns = get_insns ();
4878 end_sequence ();
4879 }
4880 }
4881 else
4882 {
4883 /* Value must be stored in the stack slot STACK_PARM
4884 during function execution. */
4885
4886 if (promoted_mode != nominal_mode)
4887 {
4888 /* Conversion is required. */
4889 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4890
4891 emit_move_insn (tempreg, validize_mem (entry_parm));
4892
4893 push_to_sequence (conversion_insns);
4894 entry_parm = convert_to_mode (nominal_mode, tempreg,
4895 TREE_UNSIGNED (TREE_TYPE (parm)));
4896 if (stack_parm)
4897 {
4898 /* ??? This may need a big-endian conversion on sparc64. */
4899 stack_parm = change_address (stack_parm, nominal_mode,
4900 NULL_RTX);
4901 }
4902 conversion_insns = get_insns ();
4903 did_conversion = 1;
4904 end_sequence ();
4905 }
4906
4907 if (entry_parm != stack_parm)
4908 {
4909 if (stack_parm == 0)
4910 {
4911 stack_parm
4912 = assign_stack_local (GET_MODE (entry_parm),
4913 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4914 set_mem_attributes (stack_parm, parm, 1);
4915 }
4916
4917 if (promoted_mode != nominal_mode)
4918 {
4919 push_to_sequence (conversion_insns);
4920 emit_move_insn (validize_mem (stack_parm),
4921 validize_mem (entry_parm));
4922 conversion_insns = get_insns ();
4923 end_sequence ();
4924 }
4925 else
4926 emit_move_insn (validize_mem (stack_parm),
4927 validize_mem (entry_parm));
4928 }
4929 if (current_function_check_memory_usage)
4930 {
4931 push_to_sequence (conversion_insns);
4932 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
4933 VOIDmode, 3, XEXP (stack_parm, 0), Pmode,
4934 GEN_INT (GET_MODE_SIZE (GET_MODE
4935 (entry_parm))),
4936 TYPE_MODE (sizetype),
4937 GEN_INT (MEMORY_USE_RW),
4938 TYPE_MODE (integer_type_node));
4939
4940 conversion_insns = get_insns ();
4941 end_sequence ();
4942 }
4943 DECL_RTL (parm) = stack_parm;
4944 }
4945
4946 /* If this "parameter" was the place where we are receiving the
4947 function's incoming structure pointer, set up the result. */
4948 if (parm == function_result_decl)
4949 {
4950 tree result = DECL_RESULT (fndecl);
4951
4952 DECL_RTL (result)
4953 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4954
4955 set_mem_attributes (DECL_RTL (result), result, 1);
4956 }
4957 }
4958
4959 /* Output all parameter conversion instructions (possibly including calls)
4960 now that all parameters have been copied out of hard registers. */
4961 emit_insns (conversion_insns);
4962
4963 last_parm_insn = get_last_insn ();
4964
4965 current_function_args_size = stack_args_size.constant;
4966
4967 /* Adjust function incoming argument size for alignment and
4968 minimum length. */
4969
4970 #ifdef REG_PARM_STACK_SPACE
4971 #ifndef MAYBE_REG_PARM_STACK_SPACE
4972 current_function_args_size = MAX (current_function_args_size,
4973 REG_PARM_STACK_SPACE (fndecl));
4974 #endif
4975 #endif
4976
4977 #ifdef STACK_BOUNDARY
4978 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4979
4980 current_function_args_size
4981 = ((current_function_args_size + STACK_BYTES - 1)
4982 / STACK_BYTES) * STACK_BYTES;
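/* For example, with a hypothetical 128-bit STACK_BOUNDARY (16 bytes),
   20 bytes of arguments are rounded up to 32.  */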
4983 #endif
4984
4985 #ifdef ARGS_GROW_DOWNWARD
4986 current_function_arg_offset_rtx
4987 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4988 : expand_expr (size_diffop (stack_args_size.var,
4989 size_int (-stack_args_size.constant)),
4990 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4991 #else
4992 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4993 #endif
4994
4995 /* See how many bytes, if any, of its args a function should try to pop
4996 on return. */
4997
4998 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4999 current_function_args_size);
5000
5001 /* For a stdarg.h function, save info about
5002 regs and stack space used by the named args. */
5003
5004 if (!hide_last_arg)
5005 current_function_args_info = args_so_far;
5006
5007 /* Set the rtx used for the function return value. Put this in its
5008 own variable so any optimizers that need this information don't have
5009 to include tree.h. Do this here so it gets done when an inlined
5010 function gets output. */
5011
5012 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
5013 }
5014 \f
5015 /* Indicate whether REGNO is an incoming argument to the current function
5016 that was promoted to a wider mode. If so, return the RTX for the
5017 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
5018 that REGNO is promoted from and whether the promotion was signed or
5019 unsigned. */
5020
5021 #ifdef PROMOTE_FUNCTION_ARGS
5022
5023 rtx
5024 promoted_input_arg (regno, pmode, punsignedp)
5025 unsigned int regno;
5026 enum machine_mode *pmode;
5027 int *punsignedp;
5028 {
5029 tree arg;
5030
5031 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5032 arg = TREE_CHAIN (arg))
5033 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5034 && REGNO (DECL_INCOMING_RTL (arg)) == regno
5035 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5036 {
5037 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5038 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5039
5040 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5041 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5042 && mode != DECL_MODE (arg))
5043 {
5044 *pmode = DECL_MODE (arg);
5045 *punsignedp = unsignedp;
5046 return DECL_INCOMING_RTL (arg);
5047 }
5048 }
5049
5050 return 0;
5051 }
5052
5053 #endif
5054 \f
5055 /* Compute the size and offset from the start of the stacked arguments for a
5056 parm passed in mode PASSED_MODE and with type TYPE.
5057
5058 INITIAL_OFFSET_PTR points to the current offset into the stacked
5059 arguments.
5060
5061 The starting offset and size for this parm are returned in *OFFSET_PTR
5062 and *ARG_SIZE_PTR, respectively.
5063
5064 IN_REGS is non-zero if the argument will be passed in registers. It will
5065 never be set if REG_PARM_STACK_SPACE is not defined.
5066
5067 FNDECL is the function in which the argument was defined.
5068
5069 There are two types of rounding that are done. The first, controlled by
5070 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5071 list to be aligned to the specific boundary (in bits). This rounding
5072 affects the initial and starting offsets, but not the argument size.
5073
5074 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5075 optionally rounds the size of the parm to PARM_BOUNDARY. The
5076 initial offset is not affected by this rounding, while the size always
5077 is and the starting offset may be. */
5078
5079 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
5080 initial_offset_ptr is positive because locate_and_pad_parm's
5081 callers pass in the total size of args so far as
5082 initial_offset_ptr. arg_size_ptr is always positive. */
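/* As a worked example, assuming PARM_BOUNDARY == 32, upward padding,
   and an argument area that grows upward, a 10-byte BLKmode argument
   whose starting offset already satisfies FUNCTION_ARG_BOUNDARY keeps
   that offset, while its size is rounded up to 12 bytes, so the next
   argument begins 12 bytes later.  */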
5083
5084 void
5085 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
5086 initial_offset_ptr, offset_ptr, arg_size_ptr,
5087 alignment_pad)
5088 enum machine_mode passed_mode;
5089 tree type;
5090 int in_regs ATTRIBUTE_UNUSED;
5091 tree fndecl ATTRIBUTE_UNUSED;
5092 struct args_size *initial_offset_ptr;
5093 struct args_size *offset_ptr;
5094 struct args_size *arg_size_ptr;
5095 struct args_size *alignment_pad;
5096
5097 {
5098 tree sizetree
5099 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
5100 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
5101 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
5102
5103 #ifdef REG_PARM_STACK_SPACE
5104 /* If we have found a stack parm before we reach the end of the
5105 area reserved for registers, skip that area. */
5106 if (! in_regs)
5107 {
5108 int reg_parm_stack_space = 0;
5109
5110 #ifdef MAYBE_REG_PARM_STACK_SPACE
5111 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
5112 #else
5113 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
5114 #endif
5115 if (reg_parm_stack_space > 0)
5116 {
5117 if (initial_offset_ptr->var)
5118 {
5119 initial_offset_ptr->var
5120 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
5121 ssize_int (reg_parm_stack_space));
5122 initial_offset_ptr->constant = 0;
5123 }
5124 else if (initial_offset_ptr->constant < reg_parm_stack_space)
5125 initial_offset_ptr->constant = reg_parm_stack_space;
5126 }
5127 }
5128 #endif /* REG_PARM_STACK_SPACE */
5129
5130 arg_size_ptr->var = 0;
5131 arg_size_ptr->constant = 0;
5132 alignment_pad->var = 0;
5133 alignment_pad->constant = 0;
5134
5135 #ifdef ARGS_GROW_DOWNWARD
5136 if (initial_offset_ptr->var)
5137 {
5138 offset_ptr->constant = 0;
5139 offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
5140 initial_offset_ptr->var);
5141 }
5142 else
5143 {
5144 offset_ptr->constant = -initial_offset_ptr->constant;
5145 offset_ptr->var = 0;
5146 }
5147 if (where_pad != none
5148 && (!host_integerp (sizetree, 1)
5149 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5150 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5151 SUB_PARM_SIZE (*offset_ptr, sizetree);
5152 if (where_pad != downward)
5153 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
5154 if (initial_offset_ptr->var)
5155 arg_size_ptr->var = size_binop (MINUS_EXPR,
5156 size_binop (MINUS_EXPR,
5157 ssize_int (0),
5158 initial_offset_ptr->var),
5159 offset_ptr->var);
5160
5161 else
5162 arg_size_ptr->constant = (-initial_offset_ptr->constant
5163 - offset_ptr->constant);
5164
5165 #else /* !ARGS_GROW_DOWNWARD */
5166 if (!in_regs
5167 #ifdef REG_PARM_STACK_SPACE
5168 || REG_PARM_STACK_SPACE (fndecl) > 0
5169 #endif
5170 )
5171 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5172 *offset_ptr = *initial_offset_ptr;
5173
5174 #ifdef PUSH_ROUNDING
5175 if (passed_mode != BLKmode)
5176 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5177 #endif
5178
5179 /* Pad_below needs the pre-rounded size to know how much to pad below,
5180 so this must be done before rounding up. */
5181 if (where_pad == downward
5182 /* However, BLKmode args passed in regs have their padding done elsewhere.
5183 The stack slot must be able to hold the entire register. */
5184 && !(in_regs && passed_mode == BLKmode))
5185 pad_below (offset_ptr, passed_mode, sizetree);
5186
5187 if (where_pad != none
5188 && (!host_integerp (sizetree, 1)
5189 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
5190 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5191
5192 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5193 #endif /* ARGS_GROW_DOWNWARD */
5194 }
5195
5196 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5197 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5198
5199 static void
5200 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5201 struct args_size *offset_ptr;
5202 int boundary;
5203 struct args_size *alignment_pad;
5204 {
5205 tree save_var = NULL_TREE;
5206 HOST_WIDE_INT save_constant = 0;
5207
5208 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5209
5210 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5211 {
5212 save_var = offset_ptr->var;
5213 save_constant = offset_ptr->constant;
5214 }
5215
5216 alignment_pad->var = NULL_TREE;
5217 alignment_pad->constant = 0;
5218
5219 if (boundary > BITS_PER_UNIT)
5220 {
5221 if (offset_ptr->var)
5222 {
5223 offset_ptr->var =
5224 #ifdef ARGS_GROW_DOWNWARD
5225 round_down
5226 #else
5227 round_up
5228 #endif
5229 (ARGS_SIZE_TREE (*offset_ptr),
5230 boundary / BITS_PER_UNIT);
5231 offset_ptr->constant = 0; /*?*/
5232 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5233 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
5234 save_var);
5235 }
5236 else
5237 {
5238 offset_ptr->constant =
5239 #ifdef ARGS_GROW_DOWNWARD
5240 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5241 #else
5242 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5243 #endif
5244 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5245 alignment_pad->constant = offset_ptr->constant - save_constant;
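/* Worked example (illustrative): with a 64-bit boundary (8 bytes) and a
   constant offset of 20, CEIL_ROUND yields 24 and FLOOR_ROUND yields 16,
   so the padding recorded above (when it is recorded) is 4 or -4 bytes
   respectively.  */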
5246 }
5247 }
5248 }
5249
5250 #ifndef ARGS_GROW_DOWNWARD
5251 static void
5252 pad_below (offset_ptr, passed_mode, sizetree)
5253 struct args_size *offset_ptr;
5254 enum machine_mode passed_mode;
5255 tree sizetree;
5256 {
5257 if (passed_mode != BLKmode)
5258 {
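/* The mode's size is rounded up to PARM_BOUNDARY and the difference goes
   below the value.  Illustrative example: a 16-bit mode with a 32-bit
   PARM_BOUNDARY and 8-bit units adds 4 - 2 = 2 bytes of padding.  */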
5259 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5260 offset_ptr->constant
5261 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5262 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5263 - GET_MODE_SIZE (passed_mode));
5264 }
5265 else
5266 {
5267 if (TREE_CODE (sizetree) != INTEGER_CST
5268 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5269 {
5270 /* Round the size up to multiple of PARM_BOUNDARY bits. */
5271 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5272 /* Add it in. */
5273 ADD_PARM_SIZE (*offset_ptr, s2);
5274 SUB_PARM_SIZE (*offset_ptr, sizetree);
5275 }
5276 }
5277 }
5278 #endif
5279 \f
5280 /* Walk the tree of blocks describing the binding levels within a function
5281 and warn about uninitialized variables.
5282 This is done after calling flow_analysis and before global_alloc
5283 clobbers the pseudo-regs to hard regs. */
5284
5285 void
5286 uninitialized_vars_warning (block)
5287 tree block;
5288 {
5289 register tree decl, sub;
5290 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5291 {
5292 if (warn_uninitialized
5293 && TREE_CODE (decl) == VAR_DECL
5294 /* These warnings are unreliable for aggregates
5295 because assigning the fields one by one can fail to convince
5296 flow.c that the entire aggregate was initialized.
5297 Unions are troublesome because members may be shorter. */
5298 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5299 && DECL_RTL (decl) != 0
5300 && GET_CODE (DECL_RTL (decl)) == REG
5301 /* Global optimizations can make it difficult to determine if a
5302 particular variable has been initialized. However, a VAR_DECL
5303 with a nonzero DECL_INITIAL had an initializer, so do not
5304 claim it is potentially uninitialized.
5305
5306 We do not care about the actual value in DECL_INITIAL, so we do
5307 not worry that it may be a dangling pointer. */
5308 && DECL_INITIAL (decl) == NULL_TREE
5309 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5310 warning_with_decl (decl,
5311 "`%s' might be used uninitialized in this function");
5312 if (extra_warnings
5313 && TREE_CODE (decl) == VAR_DECL
5314 && DECL_RTL (decl) != 0
5315 && GET_CODE (DECL_RTL (decl)) == REG
5316 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5317 warning_with_decl (decl,
5318 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5319 }
5320 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5321 uninitialized_vars_warning (sub);
5322 }
5323
5324 /* Do the appropriate part of uninitialized_vars_warning
5325 but for arguments instead of local variables. */
5326
5327 void
5328 setjmp_args_warning ()
5329 {
5330 register tree decl;
5331 for (decl = DECL_ARGUMENTS (current_function_decl);
5332 decl; decl = TREE_CHAIN (decl))
5333 if (DECL_RTL (decl) != 0
5334 && GET_CODE (DECL_RTL (decl)) == REG
5335 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5336 warning_with_decl (decl,
5337 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5338 }
5339
5340 /* If this function calls setjmp, put all vars into the stack
5341 unless they were declared `register'. */
5342
5343 void
5344 setjmp_protect (block)
5345 tree block;
5346 {
5347 register tree decl, sub;
5348 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5349 if ((TREE_CODE (decl) == VAR_DECL
5350 || TREE_CODE (decl) == PARM_DECL)
5351 && DECL_RTL (decl) != 0
5352 && (GET_CODE (DECL_RTL (decl)) == REG
5353 || (GET_CODE (DECL_RTL (decl)) == MEM
5354 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5355 /* If this variable came from an inline function, it must be
5356 that its life doesn't overlap the setjmp. If there was a
5357 setjmp in the function, it would already be in memory. We
5358 must exclude such variables because their DECL_RTL might be
5359 set to strange things such as virtual_stack_vars_rtx. */
5360 && ! DECL_FROM_INLINE (decl)
5361 && (
5362 #ifdef NON_SAVING_SETJMP
5363 /* If longjmp doesn't restore the registers,
5364 don't put anything in them. */
5365 NON_SAVING_SETJMP
5366 ||
5367 #endif
5368 ! DECL_REGISTER (decl)))
5369 put_var_into_stack (decl);
5370 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5371 setjmp_protect (sub);
5372 }
5373 \f
5374 /* Like the previous function, but for args instead of local variables. */
5375
5376 void
5377 setjmp_protect_args ()
5378 {
5379 register tree decl;
5380 for (decl = DECL_ARGUMENTS (current_function_decl);
5381 decl; decl = TREE_CHAIN (decl))
5382 if ((TREE_CODE (decl) == VAR_DECL
5383 || TREE_CODE (decl) == PARM_DECL)
5384 && DECL_RTL (decl) != 0
5385 && (GET_CODE (DECL_RTL (decl)) == REG
5386 || (GET_CODE (DECL_RTL (decl)) == MEM
5387 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5388 && (
5389 /* If longjmp doesn't restore the registers,
5390 don't put anything in them. */
5391 #ifdef NON_SAVING_SETJMP
5392 NON_SAVING_SETJMP
5393 ||
5394 #endif
5395 ! DECL_REGISTER (decl)))
5396 put_var_into_stack (decl);
5397 }
5398 \f
5399 /* Return the context-pointer register corresponding to DECL,
5400 or 0 if it does not need one. */
5401
5402 rtx
5403 lookup_static_chain (decl)
5404 tree decl;
5405 {
5406 tree context = decl_function_context (decl);
5407 tree link;
5408
5409 if (context == 0
5410 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5411 return 0;
5412
5413 /* We treat inline_function_decl as an alias for the current function
5414 because that is the inline function whose vars, types, etc.
5415 are being merged into the current function.
5416 See expand_inline_function. */
5417 if (context == current_function_decl || context == inline_function_decl)
5418 return virtual_stack_vars_rtx;
5419
5420 for (link = context_display; link; link = TREE_CHAIN (link))
5421 if (TREE_PURPOSE (link) == context)
5422 return RTL_EXPR_RTL (TREE_VALUE (link));
5423
5424 abort ();
5425 }
5426 \f
5427 /* Convert a stack slot address ADDR for variable VAR
5428 (from a containing function)
5429 into an address valid in this function (using a static chain). */
5430
5431 rtx
5432 fix_lexical_addr (addr, var)
5433 rtx addr;
5434 tree var;
5435 {
5436 rtx basereg;
5437 HOST_WIDE_INT displacement;
5438 tree context = decl_function_context (var);
5439 struct function *fp;
5440 rtx base = 0;
5441
5442 /* If this is the present function, we need not do anything. */
5443 if (context == current_function_decl || context == inline_function_decl)
5444 return addr;
5445
5446 for (fp = outer_function_chain; fp; fp = fp->next)
5447 if (fp->decl == context)
5448 break;
5449
5450 if (fp == 0)
5451 abort ();
5452
5453 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5454 addr = XEXP (XEXP (addr, 0), 0);
5455
5456 /* Decode given address as base reg plus displacement. */
5457 if (GET_CODE (addr) == REG)
5458 basereg = addr, displacement = 0;
5459 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5460 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5461 else
5462 abort ();
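/* Illustrative RTL (not from any particular target): an address of the
   form (plus (reg FP) (const_int 8)) decodes to basereg = FP and
   displacement = 8; a bare (reg FP) gives displacement 0.  */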
5463
5464 /* We accept vars reached via the containing function's
5465 incoming arg pointer and via its stack variables pointer. */
5466 if (basereg == fp->internal_arg_pointer)
5467 {
5468 /* If reached via arg pointer, get the arg pointer value
5469 out of that function's stack frame.
5470
5471 There are two cases: If a separate ap is needed, allocate a
5472 slot in the outer function for it and dereference it that way.
5473 This is correct even if the real ap is actually a pseudo.
5474 Otherwise, just adjust the offset from the frame pointer to
5475 compensate. */
5476
5477 #ifdef NEED_SEPARATE_AP
5478 rtx addr;
5479
5480 if (fp->x_arg_pointer_save_area == 0)
5481 fp->x_arg_pointer_save_area
5482 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5483
5484 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5485 addr = memory_address (Pmode, addr);
5486
5487 base = gen_rtx_MEM (Pmode, addr);
5488 MEM_ALIAS_SET (base) = get_frame_alias_set ();
5489 base = copy_to_reg (base);
5490 #else
5491 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5492 base = lookup_static_chain (var);
5493 #endif
5494 }
5495
5496 else if (basereg == virtual_stack_vars_rtx)
5497 {
5498 /* This is the same code as lookup_static_chain, duplicated here to
5499 avoid an extra call to decl_function_context. */
5500 tree link;
5501
5502 for (link = context_display; link; link = TREE_CHAIN (link))
5503 if (TREE_PURPOSE (link) == context)
5504 {
5505 base = RTL_EXPR_RTL (TREE_VALUE (link));
5506 break;
5507 }
5508 }
5509
5510 if (base == 0)
5511 abort ();
5512
5513 /* Use same offset, relative to appropriate static chain or argument
5514 pointer. */
5515 return plus_constant (base, displacement);
5516 }
5517 \f
5518 /* Return the address of the trampoline for entering nested fn FUNCTION.
5519 If necessary, allocate a trampoline (in the stack frame)
5520 and emit rtl to initialize its contents (at entry to this function). */
5521
5522 rtx
5523 trampoline_address (function)
5524 tree function;
5525 {
5526 tree link;
5527 tree rtlexp;
5528 rtx tramp;
5529 struct function *fp;
5530 tree fn_context;
5531
5532 /* Find an existing trampoline and return it. */
5533 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5534 if (TREE_PURPOSE (link) == function)
5535 return
5536 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5537
5538 for (fp = outer_function_chain; fp; fp = fp->next)
5539 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5540 if (TREE_PURPOSE (link) == function)
5541 {
5542 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5543 function);
5544 return round_trampoline_addr (tramp);
5545 }
5546
5547 /* None exists; we must make one. */
5548
5549 /* Find the `struct function' for the function containing FUNCTION. */
5550 fp = 0;
5551 fn_context = decl_function_context (function);
5552 if (fn_context != current_function_decl
5553 && fn_context != inline_function_decl)
5554 for (fp = outer_function_chain; fp; fp = fp->next)
5555 if (fp->decl == fn_context)
5556 break;
5557
5558 /* Allocate run-time space for this trampoline
5559 (usually in the defining function's stack frame). */
5560 #ifdef ALLOCATE_TRAMPOLINE
5561 tramp = ALLOCATE_TRAMPOLINE (fp);
5562 #else
5563 /* If rounding needed, allocate extra space
5564 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5565 #ifdef TRAMPOLINE_ALIGNMENT
5566 #define TRAMPOLINE_REAL_SIZE \
5567 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5568 #else
5569 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5570 #endif
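/* Illustrative sizing: if TRAMPOLINE_SIZE were 10 bytes and
   TRAMPOLINE_ALIGNMENT 128 bits (16 bytes), this would allocate
   10 + 16 - 1 = 25 bytes, leaving at least 10 usable bytes at any
   16-byte-aligned address inside the slot.  */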
5571 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5572 fp ? fp : cfun);
5573 #endif
5574
5575 /* Record the trampoline for reuse and note it for later initialization
5576 by expand_function_end. */
5577 if (fp != 0)
5578 {
5579 rtlexp = make_node (RTL_EXPR);
5580 RTL_EXPR_RTL (rtlexp) = tramp;
5581 fp->x_trampoline_list = tree_cons (function, rtlexp,
5582 fp->x_trampoline_list);
5583 }
5584 else
5585 {
5586 /* Make the RTL_EXPR node temporary, not momentary, so that the
5587 trampoline_list doesn't become garbage. */
5588 rtlexp = make_node (RTL_EXPR);
5589
5590 RTL_EXPR_RTL (rtlexp) = tramp;
5591 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5592 }
5593
5594 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5595 return round_trampoline_addr (tramp);
5596 }
5597
5598 /* Given a trampoline address,
5599 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5600
5601 static rtx
5602 round_trampoline_addr (tramp)
5603 rtx tramp;
5604 {
5605 #ifdef TRAMPOLINE_ALIGNMENT
5606 /* Round address up to desired boundary. */
5607 rtx temp = gen_reg_rtx (Pmode);
5608 temp = expand_binop (Pmode, add_optab, tramp,
5609 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5610 temp, 0, OPTAB_LIB_WIDEN);
5611 tramp = expand_binop (Pmode, and_optab, temp,
5612 GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5613 temp, 0, OPTAB_LIB_WIDEN);
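/* The add-then-mask pair computes (tramp + align - 1) & -align, the usual
   round-up-to-a-power-of-two idiom; e.g. 0x1003 with 16-byte alignment
   becomes 0x1010.  */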
5614 #endif
5615 return tramp;
5616 }
5617 \f
5618 /* Collect all this function's BLOCK nodes, including those that are chained
5619 onto the first block, into a temporary vector in depth-first order.
5620 Then store in each NOTE for the beginning or end of a block
5621 the BLOCK node to which it corresponds.
5622 The blocks are those of DECL_INITIAL (current_function_decl) and the
5623 notes are found on the current insn chain. */
5624
5625 void
5626 identify_blocks ()
5627 {
5628 int n_blocks;
5629 tree *block_vector, *last_block_vector;
5630 tree *block_stack;
5631 tree block = DECL_INITIAL (current_function_decl);
5632
5633 if (block == 0)
5634 return;
5635
5636 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5637 depth-first order. */
5638 block_vector = get_block_vector (block, &n_blocks);
5639 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5640
5641 last_block_vector = identify_blocks_1 (get_insns (),
5642 block_vector + 1,
5643 block_vector + n_blocks,
5644 block_stack);
5645
5646 /* If we didn't use all of the subblocks, we've misplaced block notes. */
5647 /* ??? This appears to happen all the time. Latent bugs elsewhere? */
5648 if (0 && last_block_vector != block_vector + n_blocks)
5649 abort ();
5650
5651 free (block_vector);
5652 free (block_stack);
5653 }
5654
5655 /* Subroutine of identify_blocks. Do the block substitution on the
5656 insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
5657
5658 BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
5659 BLOCK_VECTOR is incremented for each block seen. */
5660
5661 static tree *
5662 identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
5663 rtx insns;
5664 tree *block_vector;
5665 tree *end_block_vector;
5666 tree *orig_block_stack;
5667 {
5668 rtx insn;
5669 tree *block_stack = orig_block_stack;
5670
5671 for (insn = insns; insn; insn = NEXT_INSN (insn))
5672 {
5673 if (GET_CODE (insn) == NOTE)
5674 {
5675 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5676 {
5677 tree b;
5678
5679 /* If there are more block notes than BLOCKs, something
5680 is badly wrong. */
5681 if (block_vector == end_block_vector)
5682 abort ();
5683
5684 b = *block_vector++;
5685 NOTE_BLOCK (insn) = b;
5686 *block_stack++ = b;
5687 }
5688 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5689 {
5690 /* If there are more NOTE_INSN_BLOCK_ENDs than
5691 NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
5692 if (block_stack == orig_block_stack)
5693 abort ();
5694
5695 NOTE_BLOCK (insn) = *--block_stack;
5696 }
5697 }
5698 else if (GET_CODE (insn) == CALL_INSN
5699 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5700 {
5701 rtx cp = PATTERN (insn);
5702
5703 block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
5704 end_block_vector, block_stack);
5705 if (XEXP (cp, 1))
5706 block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
5707 end_block_vector, block_stack);
5708 if (XEXP (cp, 2))
5709 block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
5710 end_block_vector, block_stack);
5711 }
5712 }
5713
5714 /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
5715 something is badly wrong. */
5716 if (block_stack != orig_block_stack)
5717 abort ();
5718
5719 return block_vector;
5720 }
5721
5722 /* Identify BLOCKs referenced by more than one
5723 NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
5724
5725 void
5726 reorder_blocks ()
5727 {
5728 tree block = DECL_INITIAL (current_function_decl);
5729 varray_type block_stack;
5730
5731 if (block == NULL_TREE)
5732 return;
5733
5734 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
5735
5736 /* Prune the old trees away, so that they don't get in the way. */
5737 BLOCK_SUBBLOCKS (block) = NULL_TREE;
5738 BLOCK_CHAIN (block) = NULL_TREE;
5739
5740 reorder_blocks_1 (get_insns (), block, &block_stack);
5741
5742 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
5743
5744 VARRAY_FREE (block_stack);
5745 }
5746
5747 /* Helper function for reorder_blocks. Process the insn chain beginning
5748 at INSNS. Recurse for CALL_PLACEHOLDER insns. */
5749
5750 static void
5751 reorder_blocks_1 (insns, current_block, p_block_stack)
5752 rtx insns;
5753 tree current_block;
5754 varray_type *p_block_stack;
5755 {
5756 rtx insn;
5757
5758 for (insn = insns; insn; insn = NEXT_INSN (insn))
5759 {
5760 if (GET_CODE (insn) == NOTE)
5761 {
5762 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5763 {
5764 tree block = NOTE_BLOCK (insn);
5765 /* If we have seen this block before, copy it. */
5766 if (TREE_ASM_WRITTEN (block))
5767 {
5768 block = copy_node (block);
5769 NOTE_BLOCK (insn) = block;
5770 }
5771 BLOCK_SUBBLOCKS (block) = 0;
5772 TREE_ASM_WRITTEN (block) = 1;
5773 BLOCK_SUPERCONTEXT (block) = current_block;
5774 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5775 BLOCK_SUBBLOCKS (current_block) = block;
5776 current_block = block;
5777 VARRAY_PUSH_TREE (*p_block_stack, block);
5778 }
5779 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5780 {
5781 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
5782 VARRAY_POP (*p_block_stack);
5783 BLOCK_SUBBLOCKS (current_block)
5784 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5785 current_block = BLOCK_SUPERCONTEXT (current_block);
5786 }
5787 }
5788 else if (GET_CODE (insn) == CALL_INSN
5789 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
5790 {
5791 rtx cp = PATTERN (insn);
5792 reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
5793 if (XEXP (cp, 1))
5794 reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
5795 if (XEXP (cp, 2))
5796 reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
5797 }
5798 }
5799 }
5800
5801 /* Reverse the order of elements in the chain T of blocks,
5802 and return the new head of the chain (old last element). */
5803
5804 static tree
5805 blocks_nreverse (t)
5806 tree t;
5807 {
5808 register tree prev = 0, decl, next;
5809 for (decl = t; decl; decl = next)
5810 {
5811 next = BLOCK_CHAIN (decl);
5812 BLOCK_CHAIN (decl) = prev;
5813 prev = decl;
5814 }
5815 return prev;
5816 }
5817
5818 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
5819 non-NULL, list them all into VECTOR, in a depth-first preorder
5820 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
5821 blocks. */
5822
5823 static int
5824 all_blocks (block, vector)
5825 tree block;
5826 tree *vector;
5827 {
5828 int n_blocks = 0;
5829
5830 while (block)
5831 {
5832 TREE_ASM_WRITTEN (block) = 0;
5833
5834 /* Record this block. */
5835 if (vector)
5836 vector[n_blocks] = block;
5837
5838 ++n_blocks;
5839
5840 /* Record the subblocks, and their subblocks... */
5841 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5842 vector ? vector + n_blocks : 0);
5843 block = BLOCK_CHAIN (block);
5844 }
5845
5846 return n_blocks;
5847 }
5848
5849 /* Return a vector containing all the blocks rooted at BLOCK. The
5850 number of elements in the vector is stored in N_BLOCKS_P. The
5851 vector is dynamically allocated; it is the caller's responsibility
5852 to call `free' on the pointer returned. */
5853
5854 static tree *
5855 get_block_vector (block, n_blocks_p)
5856 tree block;
5857 int *n_blocks_p;
5858 {
5859 tree *block_vector;
5860
5861 *n_blocks_p = all_blocks (block, NULL);
5862 block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
5863 all_blocks (block, block_vector);
5864
5865 return block_vector;
5866 }
5867
5868 static int next_block_index = 2;
5869
5870 /* Set BLOCK_NUMBER for all the blocks in FN. */
5871
5872 void
5873 number_blocks (fn)
5874 tree fn;
5875 {
5876 int i;
5877 int n_blocks;
5878 tree *block_vector;
5879
5880 /* For SDB and XCOFF debugging output, we start numbering the blocks
5881 from 1 within each function, rather than keeping a running
5882 count. */
5883 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
5884 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
5885 next_block_index = 1;
5886 #endif
5887
5888 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
5889
5890 /* The top-level BLOCK isn't numbered at all. */
5891 for (i = 1; i < n_blocks; ++i)
5892 /* We number the blocks from two. */
5893 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
5894
5895 free (block_vector);
5896
5897 return;
5898 }
5899 \f
5900 /* Allocate a function structure and reset its contents to the defaults. */
5901 static void
5902 prepare_function_start ()
5903 {
5904 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5905
5906 init_stmt_for_function ();
5907 init_eh_for_function ();
5908
5909 cse_not_expected = ! optimize;
5910
5911 /* Caller save not needed yet. */
5912 caller_save_needed = 0;
5913
5914 /* No stack slots have been made yet. */
5915 stack_slot_list = 0;
5916
5917 current_function_has_nonlocal_label = 0;
5918 current_function_has_nonlocal_goto = 0;
5919
5920 /* There is no stack slot for handling nonlocal gotos. */
5921 nonlocal_goto_handler_slots = 0;
5922 nonlocal_goto_stack_level = 0;
5923
5924 /* No labels have been declared for nonlocal use. */
5925 nonlocal_labels = 0;
5926 nonlocal_goto_handler_labels = 0;
5927
5928 /* No function calls so far in this function. */
5929 function_call_count = 0;
5930
5931 /* No parm regs have been allocated.
5932 (This is important for output_inline_function.) */
5933 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5934
5935 /* Initialize the RTL mechanism. */
5936 init_emit ();
5937
5938 /* Initialize the queue of pending postincrement and postdecrements,
5939 and some other info in expr.c. */
5940 init_expr ();
5941
5942 /* We haven't done register allocation yet. */
5943 reg_renumber = 0;
5944
5945 init_varasm_status (cfun);
5946
5947 /* Clear out data used for inlining. */
5948 cfun->inlinable = 0;
5949 cfun->original_decl_initial = 0;
5950 cfun->original_arg_vector = 0;
5951
5952 #ifdef STACK_BOUNDARY
5953 cfun->stack_alignment_needed = STACK_BOUNDARY;
5954 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5955 #else
5956 cfun->stack_alignment_needed = 0;
5957 cfun->preferred_stack_boundary = 0;
5958 #endif
5959
5960 /* Set if a call to setjmp is seen. */
5961 current_function_calls_setjmp = 0;
5962
5963 /* Set if a call to longjmp is seen. */
5964 current_function_calls_longjmp = 0;
5965
5966 current_function_calls_alloca = 0;
5967 current_function_contains_functions = 0;
5968 current_function_is_leaf = 0;
5969 current_function_nothrow = 0;
5970 current_function_sp_is_unchanging = 0;
5971 current_function_uses_only_leaf_regs = 0;
5972 current_function_has_computed_jump = 0;
5973 current_function_is_thunk = 0;
5974
5975 current_function_returns_pcc_struct = 0;
5976 current_function_returns_struct = 0;
5977 current_function_epilogue_delay_list = 0;
5978 current_function_uses_const_pool = 0;
5979 current_function_uses_pic_offset_table = 0;
5980 current_function_cannot_inline = 0;
5981
5982 /* We have not yet needed to make a label to jump to for tail-recursion. */
5983 tail_recursion_label = 0;
5984
5985 /* We haven't had a need to make a save area for ap yet. */
5986 arg_pointer_save_area = 0;
5987
5988 /* No stack slots allocated yet. */
5989 frame_offset = 0;
5990
5991 /* No SAVE_EXPRs in this function yet. */
5992 save_expr_regs = 0;
5993
5994 /* No RTL_EXPRs in this function yet. */
5995 rtl_expr_chain = 0;
5996
5997 /* Set up to allocate temporaries. */
5998 init_temp_slots ();
5999
6000 /* Indicate that we need to distinguish between the return value of the
6001 present function and the return value of a function being called. */
6002 rtx_equal_function_value_matters = 1;
6003
6004 /* Indicate that we have not instantiated virtual registers yet. */
6005 virtuals_instantiated = 0;
6006
6007 /* Indicate that we want CONCATs now. */
6008 generating_concat_p = 1;
6009
6010 /* Indicate we have no need of a frame pointer yet. */
6011 frame_pointer_needed = 0;
6012
6013 /* By default assume not varargs or stdarg. */
6014 current_function_varargs = 0;
6015 current_function_stdarg = 0;
6016
6017 /* We haven't made any trampolines for this function yet. */
6018 trampoline_list = 0;
6019
6020 init_pending_stack_adjust ();
6021 inhibit_defer_pop = 0;
6022
6023 current_function_outgoing_args_size = 0;
6024
6025 if (init_lang_status)
6026 (*init_lang_status) (cfun);
6027 if (init_machine_status)
6028 (*init_machine_status) (cfun);
6029 }
6030
6031 /* Initialize the rtl expansion mechanism so that we can do simple things
6032 like generate sequences. This is used to provide a context during global
6033 initialization of some passes. */
6034 void
6035 init_dummy_function_start ()
6036 {
6037 prepare_function_start ();
6038 }
6039
6040 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6041 and initialize static variables for generating RTL for the statements
6042 of the function. */
6043
6044 void
6045 init_function_start (subr, filename, line)
6046 tree subr;
6047 const char *filename;
6048 int line;
6049 {
6050 prepare_function_start ();
6051
6052 /* Remember this function for later. */
6053 cfun->next_global = all_functions;
6054 all_functions = cfun;
6055
6056 current_function_name = (*decl_printable_name) (subr, 2);
6057 cfun->decl = subr;
6058
6059 /* Nonzero if this is a nested function that uses a static chain. */
6060
6061 current_function_needs_context
6062 = (decl_function_context (current_function_decl) != 0
6063 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
6064
6065 /* Within function body, compute a type's size as soon as it is laid out. */
6066 immediate_size_expand++;
6067
6068 /* Prevent ever trying to delete the first instruction of a function.
6069 Also tell final how to output a linenum before the function prologue.
6070 Note linenums could be missing, e.g. when compiling a Java .class file. */
6071 if (line > 0)
6072 emit_line_note (filename, line);
6073
6074 /* Make sure first insn is a note even if we don't want linenums.
6075 This makes sure the first insn will never be deleted.
6076 Also, final expects a note to appear there. */
6077 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6078
6079 /* Set flags used by final.c. */
6080 if (aggregate_value_p (DECL_RESULT (subr)))
6081 {
6082 #ifdef PCC_STATIC_STRUCT_RETURN
6083 current_function_returns_pcc_struct = 1;
6084 #endif
6085 current_function_returns_struct = 1;
6086 }
6087
6088 /* Warn if this value is an aggregate type,
6089 regardless of which calling convention we are using for it. */
6090 if (warn_aggregate_return
6091 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
6092 warning ("function returns an aggregate");
6093
6094 current_function_returns_pointer
6095 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
6096 }
6097
6098 /* Make sure all values used by the optimization passes have sane
6099 defaults. */
6100 void
6101 init_function_for_compilation ()
6102 {
6103 reg_renumber = 0;
6104
6105 /* No prologue/epilogue insns yet. */
6106 VARRAY_GROW (prologue, 0);
6107 VARRAY_GROW (epilogue, 0);
6108 VARRAY_GROW (sibcall_epilogue, 0);
6109 }
6110
6111 /* Indicate that the current function uses, in some fashion, extra args
6112 not explicitly mentioned in its argument list. */
6113
6114 void
6115 mark_varargs ()
6116 {
6117 current_function_varargs = 1;
6118 }
6119
6120 /* Expand a call to __main at the beginning of a possible main function. */
6121
6122 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
6123 #undef HAS_INIT_SECTION
6124 #define HAS_INIT_SECTION
6125 #endif
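/* Background, not enforced here: on targets without an init section the
   __main call emitted below is traditionally what runs the global
   constructors from libgcc before the body of main executes.  */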
6126
6127 void
6128 expand_main_function ()
6129 {
6130 #if !defined (HAS_INIT_SECTION)
6131 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
6132 VOIDmode, 0);
6133 #endif /* not HAS_INIT_SECTION */
6134 }
6135 \f
6136 extern struct obstack permanent_obstack;
6137
6138 /* Start the RTL for a new function, and set variables used for
6139 emitting RTL.
6140 SUBR is the FUNCTION_DECL node.
6141 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6142 the function's parameters, which must be run at any return statement. */
6143
6144 void
6145 expand_function_start (subr, parms_have_cleanups)
6146 tree subr;
6147 int parms_have_cleanups;
6148 {
6149 tree tem;
6150 rtx last_ptr = NULL_RTX;
6151
6152 /* Make sure volatile mem refs aren't considered
6153 valid operands of arithmetic insns. */
6154 init_recog_no_volatile ();
6155
6156 /* Set this before generating any memory accesses. */
6157 current_function_check_memory_usage
6158 = (flag_check_memory_usage
6159 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
6160
6161 current_function_instrument_entry_exit
6162 = (flag_instrument_function_entry_exit
6163 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
6164
6165 current_function_limit_stack
6166 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
6167
6168 /* If function gets a static chain arg, store it in the stack frame.
6169 Do this first, so it gets the first stack slot offset. */
6170 if (current_function_needs_context)
6171 {
6172 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
6173
6174 /* Delay copying static chain if it is not a register to avoid
6175 conflicts with regs used for parameters. */
6176 if (! SMALL_REGISTER_CLASSES
6177 || GET_CODE (static_chain_incoming_rtx) == REG)
6178 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6179 }
6180
6181 /* If the parameters of this function need cleaning up, get a label
6182 for the beginning of the code which executes those cleanups. This must
6183 be done before doing anything with return_label. */
6184 if (parms_have_cleanups)
6185 cleanup_label = gen_label_rtx ();
6186 else
6187 cleanup_label = 0;
6188
6189 /* Make the label for return statements to jump to, if this machine
6190 does not have a one-instruction return and uses an epilogue,
6191 or if it returns a structure, or if it has parm cleanups. */
6192 #ifdef HAVE_return
6193 if (cleanup_label == 0 && HAVE_return
6194 && ! current_function_instrument_entry_exit
6195 && ! current_function_returns_pcc_struct
6196 && ! (current_function_returns_struct && ! optimize))
6197 return_label = 0;
6198 else
6199 return_label = gen_label_rtx ();
6200 #else
6201 return_label = gen_label_rtx ();
6202 #endif
6203
6204 /* Initialize rtx used to return the value. */
6205 /* Do this before assign_parms so that we copy the struct value address
6206 before any library calls that assign parms might generate. */
6207
6208 /* Decide whether to return the value in memory or in a register. */
6209 if (aggregate_value_p (DECL_RESULT (subr)))
6210 {
6211 /* Returning something that won't go in a register. */
6212 register rtx value_address = 0;
6213
6214 #ifdef PCC_STATIC_STRUCT_RETURN
6215 if (current_function_returns_pcc_struct)
6216 {
6217 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6218 value_address = assemble_static_space (size);
6219 }
6220 else
6221 #endif
6222 {
6223 /* Expect to be passed the address of a place to store the value.
6224 If it is passed as an argument, assign_parms will take care of
6225 it. */
6226 if (struct_value_incoming_rtx)
6227 {
6228 value_address = gen_reg_rtx (Pmode);
6229 emit_move_insn (value_address, struct_value_incoming_rtx);
6230 }
6231 }
6232 if (value_address)
6233 {
6234 DECL_RTL (DECL_RESULT (subr))
6235 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
6236 set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
6237 DECL_RESULT (subr), 1);
6238 }
6239 }
6240 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6241 /* If return mode is void, this decl rtl should not be used. */
6242 DECL_RTL (DECL_RESULT (subr)) = 0;
6243 else if (parms_have_cleanups || current_function_instrument_entry_exit)
6244 {
6245 /* If function will end with cleanup code for parms,
6246 compute the return values into a pseudo reg,
6247 which we will copy into the true return register
6248 after the cleanups are done. */
6249
6250 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
6251
6252 #ifdef PROMOTE_FUNCTION_RETURN
6253 tree type = TREE_TYPE (DECL_RESULT (subr));
6254 int unsignedp = TREE_UNSIGNED (type);
6255
6256 mode = promote_mode (type, mode, &unsignedp, 1);
6257 #endif
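/* Example (target-dependent): where PROMOTE_FUNCTION_RETURN applies, a
   result declared `short' is typically widened here to a full word mode,
   so the pseudo created below is in the promoted mode.  */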
6258
6259 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
6260 }
6261 else
6262 /* Scalar, returned in a register. */
6263 {
6264 DECL_RTL (DECL_RESULT (subr))
6265 = hard_function_value (TREE_TYPE (DECL_RESULT (subr)), subr, 1);
6266
6267 /* Mark this reg as the function's return value. */
6268 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6269 {
6270 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6271 /* Needed because we may need to move this to memory
6272 in case it's a named return value whose address is taken. */
6273 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6274 }
6275 }
6276
6277 /* Initialize rtx for parameters and local variables.
6278 In some cases this requires emitting insns. */
6279
6280 assign_parms (subr);
6281
6282 /* Copy the static chain now if it wasn't a register. The delay is to
6283 avoid conflicts with the parameter passing registers. */
6284
6285 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6286 if (GET_CODE (static_chain_incoming_rtx) != REG)
6287 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6288
6289 /* The following was moved from init_function_start.
6290 The move is supposed to make sdb output more accurate. */
6291 /* Indicate the beginning of the function body,
6292 as opposed to parm setup. */
6293 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6294
6295 if (GET_CODE (get_last_insn ()) != NOTE)
6296 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6297 parm_birth_insn = get_last_insn ();
6298
6299 context_display = 0;
6300 if (current_function_needs_context)
6301 {
6302 /* Fetch static chain values for containing functions. */
6303 tem = decl_function_context (current_function_decl);
6304 /* Copy the static chain pointer into a pseudo. If we have
6305 small register classes, copy the value from memory if
6306 static_chain_incoming_rtx is a REG. */
6307 if (tem)
6308 {
6309 /* If the static chain originally came in a register, put it back
6310 there, then move it out in the next insn. The reason for
6311 this peculiar code is to satisfy function integration. */
6312 if (SMALL_REGISTER_CLASSES
6313 && GET_CODE (static_chain_incoming_rtx) == REG)
6314 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6315 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6316 }
6317
6318 while (tem)
6319 {
6320 tree rtlexp = make_node (RTL_EXPR);
6321
6322 RTL_EXPR_RTL (rtlexp) = last_ptr;
6323 context_display = tree_cons (tem, rtlexp, context_display);
6324 tem = decl_function_context (tem);
6325 if (tem == 0)
6326 break;
6327 /* Chain thru stack frames, assuming pointer to next lexical frame
6328 is found at the place we always store it. */
6329 #ifdef FRAME_GROWS_DOWNWARD
6330 last_ptr = plus_constant (last_ptr,
6331 -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
6332 #endif
6333 last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
6334 MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
6335 last_ptr = copy_to_reg (last_ptr);
6336
6337 /* If we are not optimizing, ensure that we know that this
6338 piece of context is live over the entire function. */
6339 if (! optimize)
6340 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6341 save_expr_regs);
6342 }
6343 }
6344
6345 if (current_function_instrument_entry_exit)
6346 {
6347 rtx fun = DECL_RTL (current_function_decl);
6348 if (GET_CODE (fun) == MEM)
6349 fun = XEXP (fun, 0);
6350 else
6351 abort ();
6352 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6353 fun, Pmode,
6354 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6355 0,
6356 hard_frame_pointer_rtx),
6357 Pmode);
6358 }
6359
6360 /* After the display initializations is where the tail-recursion label
6361 should go, if we end up needing one. Ensure we have a NOTE here
6362 since some things (like trampolines) get placed before this. */
6363 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6364
6365 /* Evaluate now the sizes of any types declared among the arguments. */
6366 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6367 {
6368 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6369 EXPAND_MEMORY_USE_BAD);
6370 /* Flush the queue in case this parameter declaration has
6371 side-effects. */
6372 emit_queue ();
6373 }
6374
6375 /* Make sure there is a line number after the function entry setup code. */
6376 force_next_line_note ();
6377 }
6378 \f
6379 /* Undo the effects of init_dummy_function_start. */
6380 void
6381 expand_dummy_function_end ()
6382 {
6383 /* End any sequences that failed to be closed due to syntax errors. */
6384 while (in_sequence_p ())
6385 end_sequence ();
6386
6387 /* Outside function body, can't compute type's actual size
6388 until next function's body starts. */
6389
6390 free_after_parsing (cfun);
6391 free_after_compilation (cfun);
6392 free (cfun);
6393 cfun = 0;
6394 }
6395
6396 /* Call DOIT for each hard register used as a return value from
6397 the current function. */
6398
6399 void
6400 diddle_return_value (doit, arg)
6401 void (*doit) PARAMS ((rtx, void *));
6402 void *arg;
6403 {
6404 rtx outgoing = current_function_return_rtx;
6405 int pcc;
6406
6407 if (! outgoing)
6408 return;
6409
6410 pcc = (current_function_returns_struct
6411 || current_function_returns_pcc_struct);
6412
6413 if ((GET_CODE (outgoing) == REG
6414 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6415 || pcc)
6416 {
6417 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6418
6419 /* A PCC-style return returns a pointer to the memory in which
6420 the structure is stored. */
6421 if (pcc)
6422 type = build_pointer_type (type);
6423
6424 #ifdef FUNCTION_OUTGOING_VALUE
6425 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6426 #else
6427 outgoing = FUNCTION_VALUE (type, current_function_decl);
6428 #endif
6429 /* If this is a BLKmode structure being returned in registers, then use
6430 the mode computed in expand_return. */
6431 if (GET_MODE (outgoing) == BLKmode)
6432 PUT_MODE (outgoing, GET_MODE (current_function_return_rtx));
6433 REG_FUNCTION_VALUE_P (outgoing) = 1;
6434 }
6435
6436 if (GET_CODE (outgoing) == REG)
6437 (*doit) (outgoing, arg);
6438 else if (GET_CODE (outgoing) == PARALLEL)
6439 {
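/* A PARALLEL return value describes a value split across several
   locations (e.g. multiple hard registers); apply DOIT to each piece
   that is a hard register.  */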
6440 int i;
6441
6442 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6443 {
6444 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6445
6446 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6447 (*doit) (x, arg);
6448 }
6449 }
6450 }
6451
6452 static void
6453 do_clobber_return_reg (reg, arg)
6454 rtx reg;
6455 void *arg ATTRIBUTE_UNUSED;
6456 {
6457 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6458 }
6459
6460 void
6461 clobber_return_register ()
6462 {
6463 diddle_return_value (do_clobber_return_reg, NULL);
6464 }
6465
6466 static void
6467 do_use_return_reg (reg, arg)
6468 rtx reg;
6469 void *arg ATTRIBUTE_UNUSED;
6470 {
6471 emit_insn (gen_rtx_USE (VOIDmode, reg));
6472 }
6473
6474 void
6475 use_return_register ()
6476 {
6477 diddle_return_value (do_use_return_reg, NULL);
6478 }
6479
6480 /* Generate RTL for the end of the current function.
6481 FILENAME and LINE are the current position in the source file.
6482
6483 It is up to language-specific callers to do cleanups for parameters--
6484 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6485
6486 void
6487 expand_function_end (filename, line, end_bindings)
6488 const char *filename;
6489 int line;
6490 int end_bindings;
6491 {
6492 tree link;
6493
6494 #ifdef TRAMPOLINE_TEMPLATE
6495 static rtx initial_trampoline;
6496 #endif
6497
6498 finish_expr_for_function ();
6499
6500 #ifdef NON_SAVING_SETJMP
6501 /* Don't put any variables in registers if we call setjmp
6502 on a machine that fails to restore the registers. */
6503 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6504 {
6505 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6506 setjmp_protect (DECL_INITIAL (current_function_decl));
6507
6508 setjmp_protect_args ();
6509 }
6510 #endif
6511
6512 /* Save the argument pointer if a save area was made for it. */
6513 if (arg_pointer_save_area)
6514 {
6515 /* arg_pointer_save_area may not be a valid memory address, so we
6516 have to check it and fix it if necessary. */
6517 rtx seq;
6518 start_sequence ();
6519 emit_move_insn (validize_mem (arg_pointer_save_area),
6520 virtual_incoming_args_rtx);
6521 seq = gen_sequence ();
6522 end_sequence ();
6523 emit_insn_before (seq, tail_recursion_reentry);
6524 }
6525
6526 /* Initialize any trampolines required by this function. */
6527 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6528 {
6529 tree function = TREE_PURPOSE (link);
6530 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6531 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6532 #ifdef TRAMPOLINE_TEMPLATE
6533 rtx blktramp;
6534 #endif
6535 rtx seq;
6536
6537 #ifdef TRAMPOLINE_TEMPLATE
6538 /* First make sure this compilation has a template for
6539 initializing trampolines. */
6540 if (initial_trampoline == 0)
6541 {
6542 initial_trampoline
6543 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6544
6545 ggc_add_rtx_root (&initial_trampoline, 1);
6546 }
6547 #endif
6548
6549 /* Generate insns to initialize the trampoline. */
6550 start_sequence ();
6551 tramp = round_trampoline_addr (XEXP (tramp, 0));
6552 #ifdef TRAMPOLINE_TEMPLATE
6553 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6554 emit_block_move (blktramp, initial_trampoline,
6555 GEN_INT (TRAMPOLINE_SIZE),
6556 TRAMPOLINE_ALIGNMENT);
6557 #endif
6558 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6559 seq = get_insns ();
6560 end_sequence ();
6561
6562 /* Put those insns at entry to the containing function (this one). */
6563 emit_insns_before (seq, tail_recursion_reentry);
6564 }
6565
6566 /* If we are doing stack checking and this function makes calls,
6567 do a stack probe at the start of the function to ensure we have enough
6568 space for another stack frame. */
6569 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6570 {
6571 rtx insn, seq;
6572
6573 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6574 if (GET_CODE (insn) == CALL_INSN)
6575 {
6576 start_sequence ();
6577 probe_stack_range (STACK_CHECK_PROTECT,
6578 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6579 seq = get_insns ();
6580 end_sequence ();
6581 emit_insns_before (seq, tail_recursion_reentry);
6582 break;
6583 }
6584 }
6585
6586 /* Warn about unused parms if extra warnings were specified. */
6587 /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6588 warning. WARN_UNUSED_PARAMETER is negative when set by
6589 -Wunused. */
6590 if (warn_unused_parameter > 0
6591 || (warn_unused_parameter < 0 && extra_warnings))
6592 {
6593 tree decl;
6594
6595 for (decl = DECL_ARGUMENTS (current_function_decl);
6596 decl; decl = TREE_CHAIN (decl))
6597 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6598 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6599 warning_with_decl (decl, "unused parameter `%s'");
6600 }
6601
6602 /* Delete handlers for nonlocal gotos if nothing uses them. */
6603 if (nonlocal_goto_handler_slots != 0
6604 && ! current_function_has_nonlocal_label)
6605 delete_handlers ();
6606
6607 /* End any sequences that failed to be closed due to syntax errors. */
6608 while (in_sequence_p ())
6609 end_sequence ();
6610
6611 /* Outside function body, can't compute type's actual size
6612 until next function's body starts. */
6613 immediate_size_expand--;
6614
6615 clear_pending_stack_adjust ();
6616 do_pending_stack_adjust ();
6617
6618 /* Mark the end of the function body.
6619 If control reaches this insn, the function can drop through
6620 without returning a value. */
6621 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6622
6623 /* Must mark the last line number note in the function, so that the test
6624 coverage code can avoid counting the last line twice. This just tells
6625 the code to ignore the immediately following line note, since there
6626 already exists a copy of this note somewhere above. This line number
6627 note is still needed for debugging though, so we can't delete it. */
6628 if (flag_test_coverage)
6629 emit_note (NULL_PTR, NOTE_INSN_REPEATED_LINE_NUMBER);
6630
6631 /* Output a linenumber for the end of the function.
6632 SDB depends on this. */
6633 emit_line_note_force (filename, line);
6634
6635 /* Output the label for the actual return from the function,
6636 if one is expected. This happens either because a function epilogue
6637 is used instead of a return instruction, or because a return was done
6638 with a goto in order to run local cleanups, or because of pcc-style
6639 structure returning. */
6640
6641 if (return_label)
6642 {
6643 rtx before, after;
6644
6645 /* Before the return label, clobber the return registers so that
6646 they are not propagated live to the rest of the function. This
6647 can only happen with functions that drop through; if there had
6648 been a return statement, there would have either been a return
6649 rtx, or a jump to the return label. */
6650
6651 before = get_last_insn ();
6652 clobber_return_register ();
6653 after = get_last_insn ();
6654
6655 if (before != after)
6656 cfun->x_clobber_return_insn = after;
6657
6658 emit_label (return_label);
6659 }
6660
6661 /* C++ uses this. */
6662 if (end_bindings)
6663 expand_end_bindings (0, 0, 0);
6664
6665 /* Now handle any leftover exception regions that may have been
6666 created for the parameters. */
6667 {
6668 rtx last = get_last_insn ();
6669 rtx label;
6670
6671 expand_leftover_cleanups ();
6672
6673 /* If there are any catch_clauses remaining, output them now. */
6674 emit_insns (catch_clauses);
6675 catch_clauses = catch_clauses_last = NULL_RTX;
6676 /* If the above emitted any code, make sure we jump around it. */
6677 if (last != get_last_insn ())
6678 {
6679 label = gen_label_rtx ();
6680 last = emit_jump_insn_after (gen_jump (label), last);
6681 last = emit_barrier_after (last);
6682 emit_label (label);
6683 }
6684 }
6685
6686 if (current_function_instrument_entry_exit)
6687 {
6688 rtx fun = DECL_RTL (current_function_decl);
6689 if (GET_CODE (fun) == MEM)
6690 fun = XEXP (fun, 0);
6691 else
6692 abort ();
6693 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6694 fun, Pmode,
6695 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6696 0,
6697 hard_frame_pointer_rtx),
6698 Pmode);
6699 }
6700
6701 /* If we had calls to alloca, and this machine needs
6702 an accurate stack pointer to exit the function,
6703 insert some code to save and restore the stack pointer. */
6704 #ifdef EXIT_IGNORE_STACK
6705 if (! EXIT_IGNORE_STACK)
6706 #endif
6707 if (current_function_calls_alloca)
6708 {
6709 rtx tem = 0;
6710
6711 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6712 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6713 }
6714
6715 /* If scalar return value was computed in a pseudo-reg, or was a named
6716 return value that got dumped to the stack, copy that to the hard
6717 return register. */
6718 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0)
6719 {
6720 tree decl_result = DECL_RESULT (current_function_decl);
6721 rtx decl_rtl = DECL_RTL (decl_result);
6722
6723 if (REG_P (decl_rtl)
6724 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
6725 : DECL_REGISTER (decl_result))
6726 {
6727 rtx real_decl_rtl;
6728
6729 #ifdef FUNCTION_OUTGOING_VALUE
6730 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
6731 current_function_decl);
6732 #else
6733 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
6734 current_function_decl);
6735 #endif
6736 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
6737
6738 /* If this is a BLKmode structure being returned in registers,
6739 then use the mode computed in expand_return. Note that if
6740 decl_rtl is memory, then its mode may have been changed,
6741 but that of current_function_return_rtx has not. */
6742 if (GET_MODE (real_decl_rtl) == BLKmode)
6743 PUT_MODE (real_decl_rtl, GET_MODE (current_function_return_rtx));
6744
6745 /* If a named return value was dumped to memory, then
6746 we may need to re-do the PROMOTE_MODE signed/unsigned
6747 extension. */
6748 if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
6749 {
6750 int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
6751
6752 #ifdef PROMOTE_FUNCTION_RETURN
6753 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
6754 &unsignedp, 1);
6755 #endif
6756
6757 convert_move (real_decl_rtl, decl_rtl, unsignedp);
6758 }
6759 else
6760 emit_move_insn (real_decl_rtl, decl_rtl);
6761
6762 /* The delay slot scheduler assumes that current_function_return_rtx
6763 holds the hard register containing the return value, not a
6764 temporary pseudo. */
6765 current_function_return_rtx = real_decl_rtl;
6766 }
6767 }
6768
6769 /* If returning a structure, arrange to return the address of the value
6770 in a place where debuggers expect to find it.
6771
6772 If returning a structure PCC style,
6773 the caller also depends on this value.
6774 And current_function_returns_pcc_struct is not necessarily set. */
6775 if (current_function_returns_struct
6776 || current_function_returns_pcc_struct)
6777 {
6778 rtx value_address =
6779 XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6780 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6781 #ifdef FUNCTION_OUTGOING_VALUE
6782 rtx outgoing
6783 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6784 current_function_decl);
6785 #else
6786 rtx outgoing
6787 = FUNCTION_VALUE (build_pointer_type (type),
6788 current_function_decl);
6789 #endif
6790
6791 /* Mark this as a function return value so integrate will delete the
6792 assignment and USE below when inlining this function. */
6793 REG_FUNCTION_VALUE_P (outgoing) = 1;
6794
6795 emit_move_insn (outgoing, value_address);
6796 }
6797
6798 /* ??? This should no longer be necessary since stupid is no longer with
6799 us, but there are some parts of the compiler (eg reload_combine, and
6800 sh mach_dep_reorg) that still try and compute their own lifetime info
6801 instead of using the general framework. */
6802 use_return_register ();
6803
6804 /* If this is an implementation of __throw, do what's necessary to
6805 communicate between __builtin_eh_return and the epilogue. */
6806 expand_eh_return ();
6807
6808 /* Output a return insn if we are using one.
6809 Otherwise, let the rtl chain end here, to drop through
6810 into the epilogue. */
6811
6812 #ifdef HAVE_return
6813 if (HAVE_return)
6814 {
6815 emit_jump_insn (gen_return ());
6816 emit_barrier ();
6817 }
6818 #endif
6819
6820 /* Fix up any gotos that jumped out to the outermost
6821 binding level of the function.
6822 Must follow emitting RETURN_LABEL. */
6823
6824 /* If you have any cleanups to do at this point,
6825 and they need to create temporary variables,
6826 then you will lose. */
6827 expand_fixups (get_insns ());
6828 }
6829 \f
6830 /* Extend a vector that records the INSN_UIDs of INSNS (either a
6831 sequence or a single insn). */
6832
6833 static void
6834 record_insns (insns, vecp)
6835 rtx insns;
6836 varray_type *vecp;
6837 {
6838 if (GET_CODE (insns) == SEQUENCE)
6839 {
6840 int len = XVECLEN (insns, 0);
6841 int i = VARRAY_SIZE (*vecp);
6842
6843 VARRAY_GROW (*vecp, i + len);
6844 while (--len >= 0)
6845 {
6846 VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
6847 ++i;
6848 }
6849 }
6850 else
6851 {
6852 int i = VARRAY_SIZE (*vecp);
6853 VARRAY_GROW (*vecp, i + 1);
6854 VARRAY_INT (*vecp, i) = INSN_UID (insns);
6855 }
6856 }
6857
6858 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6859
6860 static int
6861 contains (insn, vec)
6862 rtx insn;
6863 varray_type vec;
6864 {
6865 register int i, j;
6866
6867 if (GET_CODE (insn) == INSN
6868 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6869 {
6870 int count = 0;
6871 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6872 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6873 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
6874 count++;
6875 return count;
6876 }
6877 else
6878 {
6879 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
6880 if (INSN_UID (insn) == VARRAY_INT (vec, j))
6881 return 1;
6882 }
6883 return 0;
6884 }
6885
6886 int
6887 prologue_epilogue_contains (insn)
6888 rtx insn;
6889 {
6890 if (contains (insn, prologue))
6891 return 1;
6892 if (contains (insn, epilogue))
6893 return 1;
6894 return 0;
6895 }
6896
6897 int
6898 sibcall_epilogue_contains (insn)
6899 rtx insn;
6900 {
6901 if (sibcall_epilogue)
6902 return contains (insn, sibcall_epilogue);
6903 return 0;
6904 }
6905
6906 #ifdef HAVE_return
6907 /* Insert gen_return at the end of block BB. This also means updating
6908 block_for_insn appropriately. */
6909
6910 static void
6911 emit_return_into_block (bb, line_note)
6912 basic_block bb;
6913 rtx line_note;
6914 {
6915 rtx p, end;
6916
6917 p = NEXT_INSN (bb->end);
6918 end = emit_jump_insn_after (gen_return (), bb->end);
6919 if (line_note)
6920 emit_line_note_after (NOTE_SOURCE_FILE (line_note),
6921 NOTE_LINE_NUMBER (line_note), bb->end);
6922
6923 while (1)
6924 {
6925 set_block_for_insn (p, bb);
6926 if (p == bb->end)
6927 break;
6928 p = PREV_INSN (p);
6929 }
6930 bb->end = end;
6931 }
6932 #endif /* HAVE_return */
6933
6934 #ifdef HAVE_epilogue
6935
6936 /* Modify SEQ, a SEQUENCE that is part of the epilogue, to no modifications
6937 to the stack pointer. */
6938
6939 static void
6940 keep_stack_depressed (seq)
6941 rtx seq;
6942 {
6943 int i;
6944 rtx sp_from_reg = 0;
6945 int sp_modified_unknown = 0;
6946
6947 /* If the epilogue is just a single instruction, it's OK as is. */
6948
6949 if (GET_CODE (seq) != SEQUENCE) return;
6950
6951 /* Scan all insns in SEQ looking for ones that modified the stack
6952 pointer. Record if it modified the stack pointer by copying it
6953 from the frame pointer or if it modified it in some other way.
6954 Then modify any subsequent stack pointer references to take that
6955 into account. We start by only allowing SP to be copied from a
6956 register (presumably FP) and then be subsequently referenced. */
6957
6958 for (i = 0; i < XVECLEN (seq, 0); i++)
6959 {
6960 rtx insn = XVECEXP (seq, 0, i);
6961
6962 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6963 continue;
6964
6965 if (reg_set_p (stack_pointer_rtx, insn))
6966 {
6967 rtx set = single_set (insn);
6968
6969 /* If SP is set as a side-effect, we can't support this. */
6970 if (set == 0)
6971 abort ();
6972
6973 if (GET_CODE (SET_SRC (set)) == REG)
6974 sp_from_reg = SET_SRC (set);
6975 else
6976 sp_modified_unknown = 1;
6977
6978 /* Don't allow the SP modification to happen. */
6979 PUT_CODE (insn, NOTE);
6980 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
6981 NOTE_SOURCE_FILE (insn) = 0;
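/* Turning the insn into a NOTE_INSN_DELETED removes it in place without
   having to relink the elements of the SEQUENCE.  */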
6982 }
6983 else if (reg_referenced_p (stack_pointer_rtx, PATTERN (insn)))
6984 {
6985 if (sp_modified_unknown)
6986 abort ();
6987
6988 else if (sp_from_reg != 0)
6989 PATTERN (insn)
6990 = replace_rtx (PATTERN (insn), stack_pointer_rtx, sp_from_reg);
6991 }
6992 }
6993 }
6994 #endif
6995
6996 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6997 this into place with notes indicating where the prologue ends and where
6998 the epilogue begins. Update the basic block information when possible. */
6999
7000 void
7001 thread_prologue_and_epilogue_insns (f)
7002 rtx f ATTRIBUTE_UNUSED;
7003 {
7004 int inserted = 0;
7005 edge e;
7006 rtx seq;
7007 #ifdef HAVE_prologue
7008 rtx prologue_end = NULL_RTX;
7009 #endif
7010 #if defined (HAVE_epilogue) || defined(HAVE_return)
7011 rtx epilogue_end = NULL_RTX;
7012 #endif
7013
7014 #ifdef HAVE_prologue
7015 if (HAVE_prologue)
7016 {
7017 start_sequence ();
7018 seq = gen_prologue ();
7019 emit_insn (seq);
7020
7021 /* Retain a map of the prologue insns. */
7022 if (GET_CODE (seq) != SEQUENCE)
7023 seq = get_insns ();
7024 record_insns (seq, &prologue);
7025 prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7026
7027 seq = gen_sequence ();
7028 end_sequence ();
7029
7030 /* With optimization off, and perhaps in an empty function,
7031 the entry block may have no successors. */
7032 if (ENTRY_BLOCK_PTR->succ)
7033 {
7034 /* Can't deal with multiple successors of the entry block. */
7035 if (ENTRY_BLOCK_PTR->succ->succ_next)
7036 abort ();
7037
7038 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7039 inserted = 1;
7040 }
7041 else
7042 emit_insn_after (seq, f);
7043 }
7044 #endif
7045
7046 /* If the exit block has no non-fake predecessors, we don't need
7047 an epilogue. */
7048 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7049 if ((e->flags & EDGE_FAKE) == 0)
7050 break;
7051 if (e == NULL)
7052 goto epilogue_done;
7053
7054 #ifdef HAVE_return
7055 if (optimize && HAVE_return)
7056 {
7057 /* If we're allowed to generate a simple return instruction,
7058 then by definition we don't need a full epilogue. Examine
7059 the block that falls through to EXIT. If it does not
7060 contain any code, examine its predecessors and try to
7061 emit (conditional) return instructions. */
7062
7063 basic_block last;
7064 edge e_next;
7065 rtx label;
7066
7067 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7068 if (e->flags & EDGE_FALLTHRU)
7069 break;
7070 if (e == NULL)
7071 goto epilogue_done;
7072 last = e->src;
7073
7074 /* Verify that there are no active instructions in the last block. */
7075 label = last->end;
7076 while (label && GET_CODE (label) != CODE_LABEL)
7077 {
7078 if (active_insn_p (label))
7079 break;
7080 label = PREV_INSN (label);
7081 }
7082
7083 if (last->head == label && GET_CODE (label) == CODE_LABEL)
7084 {
7085 rtx epilogue_line_note = NULL_RTX;
7086
7087 /* Locate the line number associated with the closing brace,
7088 if we can find one. */
7089 for (seq = get_last_insn ();
7090 seq && ! active_insn_p (seq);
7091 seq = PREV_INSN (seq))
7092 if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7093 {
7094 epilogue_line_note = seq;
7095 break;
7096 }
7097
7098 for (e = last->pred; e; e = e_next)
7099 {
7100 basic_block bb = e->src;
7101 rtx jump;
7102
7103 e_next = e->pred_next;
7104 if (bb == ENTRY_BLOCK_PTR)
7105 continue;
7106
7107 jump = bb->end;
7108 if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7109 continue;
7110
7111 /* If we have an unconditional jump, we can replace that
7112 with a simple return instruction. */
7113 if (simplejump_p (jump))
7114 {
7115 emit_return_into_block (bb, epilogue_line_note);
7116 flow_delete_insn (jump);
7117 }
7118
7119 /* If we have a conditional jump, we can try to replace
7120 that with a conditional return instruction. */
7121 else if (condjump_p (jump))
7122 {
7123 rtx ret, *loc;
7124
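/* For a conditional jump the SET_SRC is an IF_THEN_ELSE; point LOC at whichever arm holds the LABEL_REF so it can be replaced with a RETURN rtx below. */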
7125 ret = SET_SRC (PATTERN (jump));
7126 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
7127 loc = &XEXP (ret, 1);
7128 else
7129 loc = &XEXP (ret, 2);
7130 ret = gen_rtx_RETURN (VOIDmode);
7131
7132 if (! validate_change (jump, loc, ret, 0))
7133 continue;
7134 if (JUMP_LABEL (jump))
7135 LABEL_NUSES (JUMP_LABEL (jump))--;
7136
7137 /* If this block has only one successor, it both jumps
7138 and falls through to the fallthru block, so we can't
7139 delete the edge. */
7140 if (bb->succ->succ_next == NULL)
7141 continue;
7142 }
7143 else
7144 continue;
7145
7146 /* Fix up the CFG for the successful change we just made. */
7147 redirect_edge_succ (e, EXIT_BLOCK_PTR);
7148 }
7149
7150 /* Emit a return insn for the exit fallthru block. Whether
7151 this is still reachable will be determined later. */
7152
7153 emit_barrier_after (last->end);
7154 emit_return_into_block (last, epilogue_line_note);
7155 epilogue_end = last->end;
7156 goto epilogue_done;
7157 }
7158 }
7159 #endif
7160 #ifdef HAVE_epilogue
7161 if (HAVE_epilogue)
7162 {
7163 /* Find the edge that falls through to EXIT. Other edges may exist
7164 due to RETURN instructions, but those don't need epilogues.
7165 There really shouldn't be a mixture -- either all should have
7166 been converted or none -- but we cope with whichever we find. */
7167
7168 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7169 if (e->flags & EDGE_FALLTHRU)
7170 break;
7171 if (e == NULL)
7172 goto epilogue_done;
7173
7174 start_sequence ();
7175 epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7176
7177 seq = gen_epilogue ();
7178
7179 /* If this function returns with the stack depressed, massage
7180 the epilogue to actually do that. */
7181 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7182 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7183 keep_stack_depressed (seq);
7184
7185 emit_jump_insn (seq);
7186
7187 /* Retain a map of the epilogue insns. */
7188 if (GET_CODE (seq) != SEQUENCE)
7189 seq = get_insns ();
7190 record_insns (seq, &epilogue);
7191
7192 seq = gen_sequence ();
7193 end_sequence ();
7194
7195 insert_insn_on_edge (seq, e);
7196 inserted = 1;
7197 }
7198 #endif
7199 epilogue_done:
7200
7201 if (inserted)
7202 commit_edge_insertions ();
7203
7204 #ifdef HAVE_sibcall_epilogue
7205 /* Emit sibling epilogues before any sibling call sites. */
7206 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7207 {
7208 basic_block bb = e->src;
7209 rtx insn = bb->end;
7210 rtx i;
7211 rtx newinsn;
7212
7213 if (GET_CODE (insn) != CALL_INSN
7214 || ! SIBLING_CALL_P (insn))
7215 continue;
7216
7217 start_sequence ();
7218 seq = gen_sibcall_epilogue ();
7219 end_sequence ();
7220
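/* Remember the insn just before the call; the newly emitted epilogue insns will appear after it and are walked below to update the insn-to-block map. */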
7221 i = PREV_INSN (insn);
7222 newinsn = emit_insn_before (seq, insn);
7223
7224 /* Update the UID to basic block map. */
7225 for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
7226 set_block_for_insn (i, bb);
7227
7228 /* Retain a map of the epilogue insns. Used in life analysis to
7229 avoid getting rid of sibcall epilogue insns. */
7230 record_insns (GET_CODE (seq) == SEQUENCE
7231 ? seq : newinsn, &sibcall_epilogue);
7232 }
7233 #endif
7234
7235 #ifdef HAVE_prologue
7236 if (prologue_end)
7237 {
7238 rtx insn, prev;
7239
7240 /* GDB handles `break f' by setting a breakpoint on the first
7241 line note after the prologue. Which means (1) that if
7242 there are line number notes before where we inserted the
7243 prologue we should move them, and (2) we should generate a
7244 note before the end of the first basic block, if there isn't
7245 one already there. */
7246
7247 for (insn = prologue_end; insn; insn = prev)
7248 {
7249 prev = PREV_INSN (insn);
7250 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7251 {
7252 /* Note that we cannot reorder the first insn in the
7253 chain, since rest_of_compilation relies on that
7254 remaining constant. */
7255 if (prev == NULL)
7256 break;
7257 reorder_insns (insn, insn, prologue_end);
7258 }
7259 }
7260
7261 /* Find the last line number note in the first block. */
7262 for (insn = BASIC_BLOCK (0)->end;
7263 insn != prologue_end;
7264 insn = PREV_INSN (insn))
7265 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7266 break;
7267
7268 /* If we didn't find one, make a copy of the first line number
7269 we run across. */
7270 if (! insn)
7271 {
7272 for (insn = next_active_insn (prologue_end);
7273 insn;
7274 insn = PREV_INSN (insn))
7275 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7276 {
7277 emit_line_note_after (NOTE_SOURCE_FILE (insn),
7278 NOTE_LINE_NUMBER (insn),
7279 prologue_end);
7280 break;
7281 }
7282 }
7283 }
7284 #endif
7285 #ifdef HAVE_epilogue
7286 if (epilogue_end)
7287 {
7288 rtx insn, next;
7289
7290 /* Similarly, move any line notes that appear after the epilogue.
7291 There is no need, however, to be quite so careful about the existence
7292 of such a note. */
7293 for (insn = epilogue_end; insn; insn = next)
7294 {
7295 next = NEXT_INSN (insn);
7296 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7297 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7298 }
7299 }
7300 #endif
7301 }
7302
7303 /* Reposition the prologue-end and epilogue-begin notes after instruction
7304 scheduling and delayed branch scheduling. */
7305
7306 void
7307 reposition_prologue_and_epilogue_notes (f)
7308 rtx f ATTRIBUTE_UNUSED;
7309 {
7310 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
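/* Number of recorded prologue (then epilogue) insns still to be located in the insn stream. */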
7311 int len;
7312
7313 if ((len = VARRAY_SIZE (prologue)) > 0)
7314 {
7315 register rtx insn, note = 0;
7316
7317 /* Scan from the beginning until we reach the last prologue insn.
7318 We apparently can't depend on basic_block_{head,end} after
7319 reorg has run. */
7320 for (insn = f; len && insn; insn = NEXT_INSN (insn))
7321 {
7322 if (GET_CODE (insn) == NOTE)
7323 {
7324 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7325 note = insn;
7326 }
7327 else if ((len -= contains (insn, prologue)) == 0)
7328 {
7329 rtx next;
7330 /* Find the prologue-end note if we haven't already, and
7331 move it to just after the last prologue insn. */
7332 if (note == 0)
7333 {
7334 for (note = insn; (note = NEXT_INSN (note));)
7335 if (GET_CODE (note) == NOTE
7336 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7337 break;
7338 }
7339
7340 next = NEXT_INSN (note);
7341
7342 /* Whether or not we can depend on BLOCK_HEAD,
7343 attempt to keep it up-to-date. */
7344 if (BLOCK_HEAD (0) == note)
7345 BLOCK_HEAD (0) = next;
7346
7347 remove_insn (note);
7348 add_insn_after (note, insn);
7349 }
7350 }
7351 }
7352
7353 if ((len = VARRAY_SIZE (epilogue)) > 0)
7354 {
7355 register rtx insn, note = 0;
7356
7357 /* Scan from the end until we reach the first epilogue insn.
7358 We apparently can't depend on basic_block_{head,end} after
7359 reorg has run. */
7360 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
7361 {
7362 if (GET_CODE (insn) == NOTE)
7363 {
7364 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
7365 note = insn;
7366 }
7367 else if ((len -= contains (insn, epilogue)) == 0)
7368 {
7369 /* Find the epilogue-begin note if we haven't already, and
7370 move it to just before the first epilogue insn. */
7371 if (note == 0)
7372 {
7373 for (note = insn; (note = PREV_INSN (note));)
7374 if (GET_CODE (note) == NOTE
7375 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
7376 break;
7377 }
7378
7379 /* Whether or not we can depend on BLOCK_HEAD,
7380 attempt to keep it up-to-date. */
7381 if (n_basic_blocks
7382 && BLOCK_HEAD (n_basic_blocks-1) == insn)
7383 BLOCK_HEAD (n_basic_blocks-1) = note;
7384
7385 remove_insn (note);
7386 add_insn_before (note, insn);
7387 }
7388 }
7389 }
7390 #endif /* HAVE_prologue or HAVE_epilogue */
7391 }
7392
7393 /* Mark T for GC. */
7394
7395 static void
7396 mark_temp_slot (t)
7397 struct temp_slot *t;
7398 {
7399 while (t)
7400 {
7401 ggc_mark_rtx (t->slot);
7402 ggc_mark_rtx (t->address);
7403 ggc_mark_tree (t->rtl_expr);
7404
7405 t = t->next;
7406 }
7407 }
7408
7409 /* Mark P for GC. */
7410
7411 static void
7412 mark_function_status (p)
7413 struct function *p;
7414 {
7415 int i;
7416 rtx *r;
7417
7418 if (p == 0)
7419 return;
7420
7421 ggc_mark_rtx (p->arg_offset_rtx);
7422
7423 if (p->x_parm_reg_stack_loc)
7424 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
7425 i > 0; --i, ++r)
7426 ggc_mark_rtx (*r);
7427
7428 ggc_mark_rtx (p->return_rtx);
7429 ggc_mark_rtx (p->x_cleanup_label);
7430 ggc_mark_rtx (p->x_return_label);
7431 ggc_mark_rtx (p->x_save_expr_regs);
7432 ggc_mark_rtx (p->x_stack_slot_list);
7433 ggc_mark_rtx (p->x_parm_birth_insn);
7434 ggc_mark_rtx (p->x_tail_recursion_label);
7435 ggc_mark_rtx (p->x_tail_recursion_reentry);
7436 ggc_mark_rtx (p->internal_arg_pointer);
7437 ggc_mark_rtx (p->x_arg_pointer_save_area);
7438 ggc_mark_tree (p->x_rtl_expr_chain);
7439 ggc_mark_rtx (p->x_last_parm_insn);
7440 ggc_mark_tree (p->x_context_display);
7441 ggc_mark_tree (p->x_trampoline_list);
7442 ggc_mark_rtx (p->epilogue_delay_list);
7443 ggc_mark_rtx (p->x_clobber_return_insn);
7444
7445 mark_temp_slot (p->x_temp_slots);
7446
7447 {
7448 struct var_refs_queue *q = p->fixup_var_refs_queue;
7449 while (q)
7450 {
7451 ggc_mark_rtx (q->modified);
7452 q = q->next;
7453 }
7454 }
7455
7456 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
7457 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
7458 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
7459 ggc_mark_tree (p->x_nonlocal_labels);
7460 }
7461
7462 /* Mark the function chain ARG (which is really a struct function **)
7463 for GC. */
7464
7465 static void
7466 mark_function_chain (arg)
7467 void *arg;
7468 {
7469 struct function *f = *(struct function **) arg;
7470
7471 for (; f; f = f->next_global)
7472 {
7473 ggc_mark_tree (f->decl);
7474
7475 mark_function_status (f);
7476 mark_eh_status (f->eh);
7477 mark_stmt_status (f->stmt);
7478 mark_expr_status (f->expr);
7479 mark_emit_status (f->emit);
7480 mark_varasm_status (f->varasm);
7481
7482 if (mark_machine_status)
7483 (*mark_machine_status) (f);
7484 if (mark_lang_status)
7485 (*mark_lang_status) (f);
7486
7487 if (f->original_arg_vector)
7488 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
7489 if (f->original_decl_initial)
7490 ggc_mark_tree (f->original_decl_initial);
7491 }
7492 }
7493
7494 /* Called once, at initialization, to initialize function.c. */
7495
7496 void
7497 init_function_once ()
7498 {
7499 ggc_add_root (&all_functions, 1, sizeof all_functions,
7500 mark_function_chain);
7501
7502 VARRAY_INT_INIT (prologue, 0, "prologue");
7503 VARRAY_INT_INIT (epilogue, 0, "epilogue");
7504 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
7505 }