/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
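
/* A worked example of the two macros above: with ALIGN == 8,
   FLOOR_ROUND (-13, 8) yields -16 and CEIL_ROUND (-13, 8) yields -8,
   computed with pure bit operations, whereas -13 / 8 rounds in an
   implementation-defined direction under traditional C.  */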

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
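
/* A sketch of the intended lifecycle (illustrative only):

       push_temp_slots ();
       slot = assign_stack_temp (SImode, 4, 0);
       ... emit insns that use SLOT ...
       preserve_temp_slots (result);   -- only if RESULT may live in SLOT
       pop_temp_slots ();

   Slots that are neither preserved nor marked "kept" become reusable
   once their level is popped or free_temp_slots is called.  */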

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
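
/* Illustrative use (a sketch, not called anywhere in this file): to
   obtain a word-sized slot with SImode's natural alignment, a caller
   could write

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and use SLOT as the home of a value that cannot stay in a register.
   Before virtual-register instantiation the address is expressed
   relative to virtual_stack_vars_rtx; afterwards it is relative to
   the frame pointer.  */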

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
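
/* Illustrative use (a sketch): expanding a call whose aggregate return
   value must live in memory might do

       rtx temp = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   where TYPE is the aggregate type.  The slot is recycled once the
   enclosing statement finishes and free_temp_slots runs, unless KEEP
   was non-zero or the slot was preserved.  */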
\f
/* If X could be a reference to a temporary slot, mark that slot as belonging
   to the level one higher.  If X matched one of our slots, just mark that
   one.  Otherwise, we can't easily predict which it is, so upgrade all of
   them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)
      {
        p->level--;
        return;
      }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->in_use = 0;
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  /* For GNU C++, we must allow a sequence to be emitted anywhere in
     the level where the sequence was started.  By not changing levels
     when the compiler is inside a sequence, the temporaries for the
     sequence will not unwittingly conflict with the temporaries for
     other sequences and/or code at that level.  */
  if (in_sequence_p ())
    return;

  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  /* See comment in push_temp_slots about why we don't change levels
     in sequences.  */
  if (in_sequence_p ())
    return;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level)
      p->in_use = 0;

  temp_slot_level--;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (GET_MODE (reg),
                                        GET_MODE_SIZE (GET_MODE (reg)),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (GET_MODE (reg),
                                  GET_MODE_SIZE (GET_MODE (reg)),
                                  0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);
      temp
        = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
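
/* The code above edits the REG rtx in place with PUT_CODE.  A sketch
   of the rtl involved: a variable that lived in

       (reg:SI 58)

   becomes something like

       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int -4)))

   reusing the same rtx object, so every existing reference to the
   pseudo now refers to the stack slot; fixup_var_refs then repairs
   any insn that this change made invalid.  */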
\f
static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* REPLACEMENTS is a pointer to a list of the above structures and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
          || GET_CODE (insn) == JUMP_INSN)
        {
          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          if (toplevel
              && GET_CODE (PATTERN (insn)) == SET
              && SET_DEST (PATTERN (insn)) == var
              && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              next = delete_insn (insn);
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              struct fixup_replacement *replacements = 0;

              fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      /* We cannot separate USE insns from the CALL_INSN
                         that they belong to.  If this is a CALL_INSN, insert
                         the move insn before the USE insns preceding it
                         instead of immediately before the insn.  */
                      if (GET_CODE (insn) == CALL_INSN)
                        {
                          insert_before = insn;
                          while (GET_CODE (PREV_INSN (insert_before)) == INSN
                                 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
                            insert_before = PREV_INSN (insert_before);
                        }
                      else
                        insert_before = insn;

                      emit_insn_before (gen_move_insn (replacements->new,
                                                       replacements->old),
                                        insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register.  See if the rtx expression
   at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, loc, insn, replacements)
     register rtx var;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register, we can leave things
             the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            tem = fixup_memory_subreg (tem, insn, 1);
          tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  int offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                  offset = (GET_MODE_SIZE (is_mode)
                            - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                  pos %= GET_MODE_BITSIZE (wanted_mode);

                  newmem = gen_rtx (MEM, wanted_mode,
                                    plus_constant (XEXP (tem, 0), offset));
                  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                  /* Make the change and see if the insn remains valid.  */
                  INSN_CODE (insn) = -1;
                  XEXP (x, 0) = newmem;
                  XEXP (x, 2) = GEN_INT (pos);

                  if (recog_memoized (insn) >= 0)
                    return;

                  /* Otherwise, restore old position.  XEXP (x, 0) will be
                     restored later.  */
                  XEXP (x, 2) = old_pos;
                }
            }

          /* If we get here, the bitfield extract insn can't accept a memory
             reference.  Copy the input into a register.  */

          tem1 = gen_reg_rtx (GET_MODE (tem));
          emit_insn_before (gen_move_insn (tem1, tem), insn);
          XEXP (x, 0) = tem1;
          return;
        }
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
        {
          /* If this SUBREG makes VAR wider, it has become a paradoxical
             SUBREG with VAR in memory, but these aren't allowed at this
             stage of the compilation.  So load VAR into a pseudo and take
             a SUBREG of that pseudo.  */
          if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
            {
              replacement = find_replacement (replacements, var);
              if (replacement->new == 0)
                replacement->new = gen_reg_rtx (GET_MODE (var));
              SUBREG_REG (x) = replacement->new;
              return;
            }

          /* See if we have already found a replacement for this SUBREG.
             If so, use it.  Otherwise, make a MEM and see if the insn
             is recognized.  If not, or if we should force MEM into a register,
             make a pseudo for this SUBREG.  */
          replacement = find_replacement (replacements, x);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
          || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
          || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
         insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
          && SUBREG_REG (SET_DEST (x)) == var
          && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
              > GET_MODE_SIZE (GET_MODE (var))))
        {
          SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
          emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
                                                            tem)),
                           insn);
          break;
        }

      {
        rtx dest = SET_DEST (x);
        rtx src = SET_SRC (x);
        rtx outerdest = dest;

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == SIGN_EXTRACT
               || GET_CODE (dest) == ZERO_EXTRACT)
          dest = XEXP (dest, 0);

        if (GET_CODE (src) == SUBREG)
          src = XEXP (src, 0);

        /* If VAR does not appear at the top level of the SET
           just scan the lower levels of the tree.  */

        if (src != var && dest != var)
          break;

        /* We will need to rerecognize this insn.  */
        INSN_CODE (insn) = -1;

#ifdef HAVE_insv
        if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
          {
            /* Since this case will return, ensure we fixup all the
               operands here.  */
            fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
            fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
            fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);

            tem = XEXP (outerdest, 0);

            /* Clean up (SUBREG:SI (MEM:mode ...) 0)
               that may appear inside a ZERO_EXTRACT.
               This was legitimate when the MEM was a REG.  */
            if (GET_CODE (tem) == SUBREG
                && SUBREG_REG (tem) == var)
              tem = fixup_memory_subreg (tem, insn, 1);
            else
              tem = fixup_stack_1 (tem, insn);

            if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
                && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
                && ! mode_dependent_address_p (XEXP (tem, 0))
                && ! MEM_VOLATILE_P (tem))
              {
                enum machine_mode wanted_mode
                  = insn_operand_mode[(int) CODE_FOR_insv][0];
                enum machine_mode is_mode = GET_MODE (tem);
                int width = INTVAL (XEXP (outerdest, 1));
                int pos = INTVAL (XEXP (outerdest, 2));

                /* If we have a narrower mode, we can do something.  */
                if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                  {
                    int offset = pos / BITS_PER_UNIT;
                    rtx old_pos = XEXP (outerdest, 2);
                    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                    offset = (GET_MODE_SIZE (is_mode)
                              - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                    pos %= GET_MODE_BITSIZE (wanted_mode);

                    newmem = gen_rtx (MEM, wanted_mode,
                                      plus_constant (XEXP (tem, 0), offset));
                    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                    /* Make the change and see if the insn remains valid.  */
                    INSN_CODE (insn) = -1;
                    XEXP (outerdest, 0) = newmem;
                    XEXP (outerdest, 2) = GEN_INT (pos);

                    if (recog_memoized (insn) >= 0)
                      return;

                    /* Otherwise, restore old position.  XEXP (x, 0) will be
                       restored later.  */
                    XEXP (outerdest, 2) = old_pos;
                  }
              }

            /* If we get here, the bit-field store doesn't allow memory
               or isn't located at a constant position.  Load the value into
               a register, do the store, and put it back into memory.  */

            tem1 = gen_reg_rtx (GET_MODE (tem));
            emit_insn_before (gen_move_insn (tem1, tem), insn);
            emit_insn_after (gen_move_insn (tem, tem1), insn);
            XEXP (outerdest, 0) = tem1;
            return;
          }
#endif

        /* STRICT_LOW_PART is a no-op on memory references
           and it can cause combinations to be unrecognizable,
           so eliminate it.  */

        if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
          SET_DEST (x) = XEXP (SET_DEST (x), 0);

        /* A valid insn to copy VAR into or out of a register
           must be left alone, to avoid an infinite loop here.
           If the reference to VAR is by a subreg, fix that up,
           since SUBREG is not valid for a memref.
           Also fix up the address of the stack slot.  */

        if ((SET_SRC (x) == var
             || (GET_CODE (SET_SRC (x)) == SUBREG
                 && SUBREG_REG (SET_SRC (x)) == var))
            && (GET_CODE (SET_DEST (x)) == REG
                || (GET_CODE (SET_DEST (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            replacement = find_replacement (replacements, SET_SRC (x));
            if (replacement->new)
              {
                SET_SRC (x) = replacement->new;
                return;
              }
            else if (GET_CODE (SET_SRC (x)) == SUBREG)
              SET_SRC (x) = replacement->new
                = fixup_memory_subreg (SET_SRC (x), insn, 0);
            else
              SET_SRC (x) = replacement->new
                = fixup_stack_1 (SET_SRC (x), insn);
            return;
          }

        if ((SET_DEST (x) == var
             || (GET_CODE (SET_DEST (x)) == SUBREG
                 && SUBREG_REG (SET_DEST (x)) == var))
            && (GET_CODE (SET_SRC (x)) == REG
                || (GET_CODE (SET_SRC (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            if (GET_CODE (SET_DEST (x)) == SUBREG)
              SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
            else
              SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
            return;
          }

        /* Otherwise, storing into VAR must be handled specially
           by storing into a temporary and copying that into VAR
           with a new insn after this one.  */

        if (dest == var)
          {
            rtx temp;
            rtx fixeddest;
            tem = SET_DEST (x);
            /* A STRICT_LOW_PART around a MEM can be discarded.  */
            if (GET_CODE (tem) == STRICT_LOW_PART)
              tem = XEXP (tem, 0);
            /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
            if (GET_CODE (tem) == SUBREG)
              fixeddest = fixup_memory_subreg (tem, insn, 0);
            else
              fixeddest = fixup_stack_1 (tem, insn);

            temp = gen_reg_rtx (GET_MODE (tem));
            emit_insn_after (gen_move_insn (fixeddest, temp), insn);
            SET_DEST (x) = temp;
          }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
        }
    }
}
\f
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

#if BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
             - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
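
/* A sketch of the rewrite performed above: on a little-endian machine,

       (subreg:QI (mem:SI addr) 0)   ==>   (mem:QI addr)

   while on a big-endian machine ADDR is biased by the size difference
   so the narrower MEM still refers to the least significant part of
   the wider value.  */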

/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
walk_fixup_memory_subreg (x, insn)
     register rtx x;
     rtx insn;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, 0);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j)
              = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
            || GET_CODE (insn) == JUMP_INSN)
          fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif

/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
         (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
          && GET_CODE (XEXP (ad, 0)) == REG
          && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
          && GET_CODE (XEXP (ad, 1)) == CONST_INT)
        {
          rtx temp, seq;
          if (memory_address_p (GET_MODE (x), ad))
            return x;

          start_sequence ();
          temp = copy_to_reg (ad);
          seq = gen_sequence ();
          end_sequence ();
          emit_insn_before (seq, insn);
          return change_address (x, VOIDmode, temp);
        }
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
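
/* A sketch of the effect: if a target's addressing modes cannot encode

       (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 40000)))

   fixup_stack_1 emits insns before INSN to copy the PLUS into a fresh
   pseudo REG and rewrites the reference as (mem:SI (reg NEW)).  */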
\f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
          != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
         and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
        memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
               && equiv_mem != 0)
        memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
               && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
        memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
               && equiv_mem != 0
               && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
        memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
          && ! mode_dependent_address_p (XEXP (memref, 0))
          && ! MEM_VOLATILE_P (memref))
        {
          /* Now adjust the address, first for any subreg'ing
             that we are now getting rid of,
             and then for which byte of the word is wanted.  */

          register int offset = INTVAL (XEXP (bitfield, 2));
          /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
          offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
                    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
          /* Adjust OFFSET to count bytes from low-address byte.  */
          offset /= BITS_PER_UNIT;
          if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
            {
              offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
              offset -= (MIN (UNITS_PER_WORD,
                              GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
                         - MIN (UNITS_PER_WORD,
                                GET_MODE_SIZE (GET_MODE (memref))));
#endif
            }

          memref = change_address (memref, mode,
                                   plus_constant (XEXP (memref, 0), offset));

          /* Store this memory reference where
             we found the bit field reference.  */

          if (destflag)
            {
              validate_change (insn, &SET_DEST (body), memref, 1);
              if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
                {
                  rtx src = SET_SRC (body);
                  while (GET_CODE (src) == SUBREG
                         && SUBREG_WORD (src) == 0)
                    src = SUBREG_REG (src);
                  if (GET_MODE (src) != GET_MODE (memref))
                    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
                  validate_change (insn, &SET_SRC (body), src, 1);
                }
              else if (GET_MODE (SET_SRC (body)) != VOIDmode
                       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
                /* This shouldn't happen because anything that didn't have
                   one of these modes should have got converted explicitly
                   and then referenced through a subreg.
                   This is so because the original bit-field was
                   handled by agg_mode and so its tree structure had
                   the same mode that memref now has.  */
                abort ();
            }
          else
            {
              rtx dest = SET_DEST (body);

              while (GET_CODE (dest) == SUBREG
                     && SUBREG_WORD (dest) == 0)
                dest = SUBREG_REG (dest);

              validate_change (insn, &SET_DEST (body), dest, 1);

              if (GET_MODE (dest) == GET_MODE (memref))
                validate_change (insn, &SET_SRC (body), memref, 1);
              else
                {
                  /* Convert the mem ref to the destination mode.  */
                  rtx newreg = gen_reg_rtx (GET_MODE (dest));

                  start_sequence ();
                  convert_move (newreg, memref,
                                GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
                  seq = get_insns ();
                  end_sequence ();

                  validate_change (insn, &SET_SRC (body), newreg, 1);
                }
            }

          /* See if we can convert this extraction or insertion into
             a simple move insn.  We might not be able to do so if this
             was, for example, part of a PARALLEL.

             If we succeed, write out any needed conversions.  If we fail,
             it is hard to guess why we failed, so don't do anything
             special; just let the optimization be suppressed.  */

          if (apply_change_group () && seq)
            emit_insns_before (seq, insn);
        }
    }
}
1788\f
1789/* These routines are responsible for converting virtual register references
1790 to the actual hard register references once RTL generation is complete.
1791
1792 The following four variables are used for communication between the
1793 routines. They contain the offsets of the virtual registers from their
1794 respective hard registers. */
1795
1796static int in_arg_offset;
1797static int var_offset;
1798static int dynamic_offset;
1799static int out_arg_offset;
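/* As a sketch of the rewriting these offsets drive: a reference such as
   (plus virtual_stack_vars_rtx (const_int 4)) is replaced by
   (plus frame_pointer_rtx (const_int VAR_OFFSET+4)), the constants being
   folded by plus_constant in instantiate_virtual_regs_1 below.  */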
1800
1801/* In most machines, the stack pointer register is equivalent to the bottom
1802 of the stack. */
1803
1804#ifndef STACK_POINTER_OFFSET
1805#define STACK_POINTER_OFFSET 0
1806#endif
1807
1808/* If not defined, pick an appropriate default for the offset of dynamically
1809 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1810 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1811
1812#ifndef STACK_DYNAMIC_OFFSET
1813
1814#ifdef ACCUMULATE_OUTGOING_ARGS
1815/* The bottom of the stack points to the actual arguments. If
1816 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1817	 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1818 stack space for register parameters is not pushed by the caller, but
1819 rather part of the fixed stack areas and hence not included in
1820 `current_function_outgoing_args_size'. Nevertheless, we must allow
1821 for it when allocating stack dynamic objects. */
1822
1823#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1824#define STACK_DYNAMIC_OFFSET(FNDECL) \
1825(current_function_outgoing_args_size \
1826 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1827
1828#else
1829#define STACK_DYNAMIC_OFFSET(FNDECL) \
1830(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1831#endif
1832
1833#else
1834#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1835#endif
1836#endif
1837
1838/* Pass through the INSNS of function FNDECL and convert virtual register
1839 references to hard register references. */
1840
1841void
1842instantiate_virtual_regs (fndecl, insns)
1843 tree fndecl;
1844 rtx insns;
1845{
1846 rtx insn;
1847
1848 /* Compute the offsets to use for this function. */
1849 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1850 var_offset = STARTING_FRAME_OFFSET;
1851 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1852 out_arg_offset = STACK_POINTER_OFFSET;
1853
1854 /* Scan all variables and parameters of this function. For each that is
1855 in memory, instantiate all virtual registers if the result is a valid
1856 address. If not, we do it later. That will handle most uses of virtual
1857 regs on many machines. */
1858 instantiate_decls (fndecl, 1);
1859
1860 /* Initialize recognition, indicating that volatile is OK. */
1861 init_recog ();
1862
1863 /* Scan through all the insns, instantiating every virtual register still
1864 present. */
1865 for (insn = insns; insn; insn = NEXT_INSN (insn))
1866 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1867 || GET_CODE (insn) == CALL_INSN)
1868 {
1869 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
5f4f0e22 1870 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1871 }
1872
1873 /* Now instantiate the remaining register equivalences for debugging info.
1874 These will not be valid addresses. */
1875 instantiate_decls (fndecl, 0);
1876
1877 /* Indicate that, from now on, assign_stack_local should use
1878 frame_pointer_rtx. */
1879 virtuals_instantiated = 1;
1880}
1881
1882/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1883 all virtual registers in their DECL_RTL's.
1884
1885 If VALID_ONLY, do this only if the resulting address is still valid.
1886 Otherwise, always do it. */
1887
1888static void
1889instantiate_decls (fndecl, valid_only)
1890 tree fndecl;
1891 int valid_only;
1892{
1893 tree decl;
1894
a82ad570 1895 if (DECL_INLINE (fndecl))
1896 /* When compiling an inline function, the obstack used for
1897 rtl allocation is the maybepermanent_obstack. Calling
1898 `resume_temporary_allocation' switches us back to that
1899 obstack while we process this function's parameters. */
1900 resume_temporary_allocation ();
1901
1902 /* Process all parameters of the function. */
1903 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1904 {
1905 if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM
1906 && (! valid_only
1907 || ! mode_dependent_address_p (XEXP (DECL_RTL (decl), 0))))
6f086dfc 1908 instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
1909 (valid_only ? DECL_RTL (decl) : NULL_RTX),
1910 0);
6f086dfc 1911 if (DECL_INCOMING_RTL (decl)
1912 && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM
1913 && (! valid_only
1914 || ! mode_dependent_address_p (XEXP (DECL_INCOMING_RTL (decl), 0))))
6f086dfc 1915 instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
1916 (valid_only ? DECL_INCOMING_RTL (decl)
1917 : NULL_RTX),
1918 0);
1919 }
1920
1921 /* Now process all variables defined in the function or its subblocks. */
1922 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1923
a82ad570 1924 if (DECL_INLINE (fndecl))
1925 {
1926 /* Save all rtl allocated for this function by raising the
1927 high-water mark on the maybepermanent_obstack. */
1928 preserve_data ();
1929 /* All further rtl allocation is now done in the current_obstack. */
1930 rtl_in_current_obstack ();
1931 }
1932}
1933
1934/* Subroutine of instantiate_decls: Process all decls in the given
1935 BLOCK node and all its subblocks. */
1936
1937static void
1938instantiate_decls_1 (let, valid_only)
1939 tree let;
1940 int valid_only;
1941{
1942 tree t;
1943
1944 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1945 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM
1946 && (! valid_only
1947 || ! mode_dependent_address_p (XEXP (DECL_RTL (t), 0))))
6f086dfc 1948 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
5f4f0e22 1949 valid_only ? DECL_RTL (t) : NULL_RTX, 0);
1950
1951 /* Process all subblocks. */
1952 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1953 instantiate_decls_1 (t, valid_only);
1954}
1955\f
1956/* Given a pointer to a piece of rtx and an optional pointer to the
1957 containing object, instantiate any virtual registers present in it.
1958
1959 If EXTRA_INSNS, we always do the replacement and generate
1960	   any extra insns before OBJECT.  If it is zero, we do nothing if replacement
1961 is not valid.
1962
1963 Return 1 if we either had nothing to do or if we were able to do the
1964 needed replacement. Return 0 otherwise; we only return zero if
1965 EXTRA_INSNS is zero.
1966
1967 We first try some simple transformations to avoid the creation of extra
1968 pseudos. */
1969
1970static int
1971instantiate_virtual_regs_1 (loc, object, extra_insns)
1972 rtx *loc;
1973 rtx object;
1974 int extra_insns;
1975{
1976 rtx x;
1977 RTX_CODE code;
1978 rtx new = 0;
1979 int offset;
1980 rtx temp;
1981 rtx seq;
1982 int i, j;
1983 char *fmt;
1984
1985 /* Re-start here to avoid recursion in common cases. */
1986 restart:
1987
1988 x = *loc;
1989 if (x == 0)
1990 return 1;
1991
1992 code = GET_CODE (x);
1993
1994 /* Check for some special cases. */
1995 switch (code)
1996 {
1997 case CONST_INT:
1998 case CONST_DOUBLE:
1999 case CONST:
2000 case SYMBOL_REF:
2001 case CODE_LABEL:
2002 case PC:
2003 case CC0:
2004 case ASM_INPUT:
2005 case ADDR_VEC:
2006 case ADDR_DIFF_VEC:
2007 case RETURN:
2008 return 1;
2009
2010 case SET:
2011	      /* We are allowed to set the virtual registers.  This means that
2012	 the actual register should receive the source minus the
2013 appropriate offset. This is used, for example, in the handling
2014 of non-local gotos. */
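      /* E.g. (a sketch): (set virtual_stack_vars_rtx SRC) becomes
	 (set frame_pointer_rtx (SRC - var_offset)), the subtraction
	 being emitted via the force_operand calls below.  */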
2015 if (SET_DEST (x) == virtual_incoming_args_rtx)
2016 new = arg_pointer_rtx, offset = - in_arg_offset;
2017 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2018 new = frame_pointer_rtx, offset = - var_offset;
2019 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2020 new = stack_pointer_rtx, offset = - dynamic_offset;
2021 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2022 new = stack_pointer_rtx, offset = - out_arg_offset;
2023
2024 if (new)
2025 {
2026 /* The only valid sources here are PLUS or REG. Just do
2027 the simplest possible thing to handle them. */
2028 if (GET_CODE (SET_SRC (x)) != REG
2029 && GET_CODE (SET_SRC (x)) != PLUS)
2030 abort ();
2031
2032 start_sequence ();
2033 if (GET_CODE (SET_SRC (x)) != REG)
5f4f0e22 2034 temp = force_operand (SET_SRC (x), NULL_RTX);
2035 else
2036 temp = SET_SRC (x);
5f4f0e22 2037 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2038 seq = get_insns ();
2039 end_sequence ();
2040
2041 emit_insns_before (seq, object);
2042 SET_DEST (x) = new;
2043
2044 if (!validate_change (object, &SET_SRC (x), temp, 0)
2045 || ! extra_insns)
2046 abort ();
2047
2048 return 1;
2049 }
2050
2051 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2052 loc = &SET_SRC (x);
2053 goto restart;
2054
2055 case PLUS:
2056 /* Handle special case of virtual register plus constant. */
2057 if (CONSTANT_P (XEXP (x, 1)))
2058 {
2059 rtx old;
2060
2061 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2062 if (GET_CODE (XEXP (x, 0)) == PLUS)
2063 {
2064 rtx inner = XEXP (XEXP (x, 0), 0);
2065
2066 if (inner == virtual_incoming_args_rtx)
2067 new = arg_pointer_rtx, offset = in_arg_offset;
2068 else if (inner == virtual_stack_vars_rtx)
2069 new = frame_pointer_rtx, offset = var_offset;
2070 else if (inner == virtual_stack_dynamic_rtx)
2071 new = stack_pointer_rtx, offset = dynamic_offset;
2072 else if (inner == virtual_outgoing_args_rtx)
2073 new = stack_pointer_rtx, offset = out_arg_offset;
2074 else
2075 {
2076 loc = &XEXP (x, 0);
2077 goto restart;
2078 }
2079
2080 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2081 extra_insns);
2082 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2083 }
2084
2085 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2086 new = arg_pointer_rtx, offset = in_arg_offset;
2087 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2088 new = frame_pointer_rtx, offset = var_offset;
2089 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2090 new = stack_pointer_rtx, offset = dynamic_offset;
2091 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2092 new = stack_pointer_rtx, offset = out_arg_offset;
2093 else
2094 {
2095 /* We know the second operand is a constant. Unless the
2096	     first operand is a REG (which has already been checked),
2097 it needs to be checked. */
2098 if (GET_CODE (XEXP (x, 0)) != REG)
2099 {
2100 loc = &XEXP (x, 0);
2101 goto restart;
2102 }
2103 return 1;
2104 }
2105
2106 old = XEXP (x, 0);
2107 XEXP (x, 0) = new;
2108 new = plus_constant (XEXP (x, 1), offset);
2109
2110 /* If the new constant is zero, try to replace the sum with its
2111 first operand. */
2112 if (new == const0_rtx
2113 && validate_change (object, loc, XEXP (x, 0), 0))
2114 return 1;
2115
2116 /* Next try to replace constant with new one. */
2117 if (!validate_change (object, &XEXP (x, 1), new, 0))
2118 {
2119 if (! extra_insns)
2120 {
2121 XEXP (x, 0) = old;
2122 return 0;
2123 }
2124
2125 /* Otherwise copy the new constant into a register and replace
2126 constant with that register. */
2127 temp = gen_reg_rtx (Pmode);
2128 if (validate_change (object, &XEXP (x, 1), temp, 0))
2129 emit_insn_before (gen_move_insn (temp, new), object);
2130 else
2131 {
2132 /* If that didn't work, replace this expression with a
2133 register containing the sum. */
2134
2135 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2136 XEXP (x, 0) = old;
2137
2138 start_sequence ();
5f4f0e22 2139 temp = force_operand (new, NULL_RTX);
2140 seq = get_insns ();
2141 end_sequence ();
2142
2143 emit_insns_before (seq, object);
2144 if (! validate_change (object, loc, temp, 0)
2145 && ! validate_replace_rtx (x, temp, object))
2146 abort ();
2147 }
2148 }
2149
2150 return 1;
2151 }
2152
2153 /* Fall through to generic two-operand expression case. */
2154 case EXPR_LIST:
2155 case CALL:
2156 case COMPARE:
2157 case MINUS:
2158 case MULT:
2159 case DIV: case UDIV:
2160 case MOD: case UMOD:
2161 case AND: case IOR: case XOR:
2162 case LSHIFT: case ASHIFT: case ROTATE:
2163 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2164 case NE: case EQ:
2165 case GE: case GT: case GEU: case GTU:
2166 case LE: case LT: case LEU: case LTU:
2167 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2168 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2169 loc = &XEXP (x, 0);
2170 goto restart;
2171
2172 case MEM:
2173 /* Most cases of MEM that convert to valid addresses have already been
2174 handled by our scan of regno_reg_rtx. The only special handling we
2175 need here is to make a copy of the rtx to ensure it isn't being
b335c2cc 2176 shared if we have to change it to a pseudo.
2177
2178 If the rtx is a simple reference to an address via a virtual register,
2179 it can potentially be shared. In such cases, first try to make it
2180 a valid address, which can also be shared. Otherwise, copy it and
2181 proceed normally.
2182
2183 First check for common cases that need no processing. These are
2184 usually due to instantiation already being done on a previous instance
2185 of a shared rtx. */
2186
2187 temp = XEXP (x, 0);
2188 if (CONSTANT_ADDRESS_P (temp)
2189#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2190 || temp == arg_pointer_rtx
2191#endif
2192 || temp == frame_pointer_rtx)
2193 return 1;
2194
2195 if (GET_CODE (temp) == PLUS
2196 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2197 && (XEXP (temp, 0) == frame_pointer_rtx
2198#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2199 || XEXP (temp, 0) == arg_pointer_rtx
2200#endif
2201 ))
2202 return 1;
2203
2204 if (temp == virtual_stack_vars_rtx
2205 || temp == virtual_incoming_args_rtx
2206 || (GET_CODE (temp) == PLUS
2207 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2208 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2209 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2210 {
2211 /* This MEM may be shared. If the substitution can be done without
2212 the need to generate new pseudos, we want to do it in place
2213 so all copies of the shared rtx benefit. The call below will
2214 only make substitutions if the resulting address is still
2215 valid.
2216
2217 Note that we cannot pass X as the object in the recursive call
2218 since the insn being processed may not allow all valid
2219	 addresses.  However, if we were not passed an object, we can
2220 only modify X without copying it if X will have a valid
2221 address.
6f086dfc 2222
2223 ??? Also note that this can still lose if OBJECT is an insn that
2224	 has fewer restrictions on an address than some other insn.
2225 In that case, we will modify the shared address. This case
2226 doesn't seem very likely, though. */
2227
2228 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2229 object ? object : x, 0))
2230 return 1;
2231
2232 /* Otherwise make a copy and process that copy. We copy the entire
2233 RTL expression since it might be a PLUS which could also be
2234 shared. */
2235 *loc = x = copy_rtx (x);
2236 }
2237
2238 /* Fall through to generic unary operation case. */
2239 case USE:
2240 case CLOBBER:
2241 case SUBREG:
2242 case STRICT_LOW_PART:
2243 case NEG: case NOT:
2244 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2245 case SIGN_EXTEND: case ZERO_EXTEND:
2246 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2247 case FLOAT: case FIX:
2248 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2249 case ABS:
2250 case SQRT:
2251 case FFS:
2252	      /* These cases either have just one operand or we know that we need not
2253 check the rest of the operands. */
2254 loc = &XEXP (x, 0);
2255 goto restart;
2256
2257 case REG:
2258 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2259 in front of this insn and substitute the temporary. */
2260 if (x == virtual_incoming_args_rtx)
2261 new = arg_pointer_rtx, offset = in_arg_offset;
2262 else if (x == virtual_stack_vars_rtx)
2263 new = frame_pointer_rtx, offset = var_offset;
2264 else if (x == virtual_stack_dynamic_rtx)
2265 new = stack_pointer_rtx, offset = dynamic_offset;
2266 else if (x == virtual_outgoing_args_rtx)
2267 new = stack_pointer_rtx, offset = out_arg_offset;
2268
2269 if (new)
2270 {
2271 temp = plus_constant (new, offset);
2272 if (!validate_change (object, loc, temp, 0))
2273 {
2274 if (! extra_insns)
2275 return 0;
2276
2277 start_sequence ();
5f4f0e22 2278 temp = force_operand (temp, NULL_RTX);
2279 seq = get_insns ();
2280 end_sequence ();
2281
2282 emit_insns_before (seq, object);
2283 if (! validate_change (object, loc, temp, 0)
2284 && ! validate_replace_rtx (x, temp, object))
2285 abort ();
2286 }
2287 }
2288
2289 return 1;
2290 }
2291
2292 /* Scan all subexpressions. */
2293 fmt = GET_RTX_FORMAT (code);
2294 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2295 if (*fmt == 'e')
2296 {
2297 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2298 return 0;
2299 }
2300 else if (*fmt == 'E')
2301 for (j = 0; j < XVECLEN (x, i); j++)
2302 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2303 extra_insns))
2304 return 0;
2305
2306 return 1;
2307}
2308\f
2309/* Optimization: assuming this function does not receive nonlocal gotos,
2310 delete the handlers for such, as well as the insns to establish
2311 and disestablish them. */
2312
2313static void
2314delete_handlers ()
2315{
2316 rtx insn;
2317 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2318 {
2319 /* Delete the handler by turning off the flag that would
2320 prevent jump_optimize from deleting it.
2321 Also permit deletion of the nonlocal labels themselves
2322 if nothing local refers to them. */
2323 if (GET_CODE (insn) == CODE_LABEL)
2324 LABEL_PRESERVE_P (insn) = 0;
2325 if (GET_CODE (insn) == INSN
2326 && ((nonlocal_goto_handler_slot != 0
2327 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2328 || (nonlocal_goto_stack_level != 0
2329 && reg_mentioned_p (nonlocal_goto_stack_level,
2330 PATTERN (insn)))))
2331 delete_insn (insn);
2332 }
2333}
2334
2335/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2336 of the current function. */
2337
2338rtx
2339nonlocal_label_rtx_list ()
2340{
2341 tree t;
2342 rtx x = 0;
2343
2344 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2345 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2346
2347 return x;
2348}
2349\f
2350/* Output a USE for any register use in RTL.
2351 This is used with -noreg to mark the extent of lifespan
2352 of any registers used in a user-visible variable's DECL_RTL. */
2353
2354void
2355use_variable (rtl)
2356 rtx rtl;
2357{
2358 if (GET_CODE (rtl) == REG)
2359 /* This is a register variable. */
2360 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2361 else if (GET_CODE (rtl) == MEM
2362 && GET_CODE (XEXP (rtl, 0)) == REG
2363 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2364 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2365 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2366 /* This is a variable-sized structure. */
2367 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2368}
2369
2370/* Like use_variable except that it outputs the USEs after INSN
2371 instead of at the end of the insn-chain. */
2372
2373void
2374use_variable_after (rtl, insn)
2375 rtx rtl, insn;
2376{
2377 if (GET_CODE (rtl) == REG)
2378 /* This is a register variable. */
2379 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2380 else if (GET_CODE (rtl) == MEM
2381 && GET_CODE (XEXP (rtl, 0)) == REG
2382 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2383 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2384 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2385 /* This is a variable-sized structure. */
2386 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2387}
2388\f
2389int
2390max_parm_reg_num ()
2391{
2392 return max_parm_reg;
2393}
2394
2395/* Return the first insn following those generated by `assign_parms'. */
2396
2397rtx
2398get_first_nonparm_insn ()
2399{
2400 if (last_parm_insn)
2401 return NEXT_INSN (last_parm_insn);
2402 return get_insns ();
2403}
2404
2405/* Return 1 if EXP returns an aggregate value, for which an address
2406 must be passed to the function or returned by the function. */
2407
2408int
2409aggregate_value_p (exp)
2410 tree exp;
2411{
2412 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2413 return 1;
2414 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2415 return 1;
2416 if (flag_pcc_struct_return
2417 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2418 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2419 return 1;
2420 return 0;
2421}
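
/* For example (a sketch): with -fpcc-struct-return in effect, a function
   declared as

	struct two_ints { int a, b; } f ();

   gets a nonzero result here, so the caller passes an address where
   the returned value should be stored.  */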
2422\f
2423/* Assign RTL expressions to the function's parameters.
2424 This may involve copying them into registers and using
2425 those registers as the RTL for them.
2426
2427 If SECOND_TIME is non-zero it means that this function is being
2428 called a second time. This is done by integrate.c when a function's
2429 compilation is deferred. We need to come back here in case the
2430 FUNCTION_ARG macro computes items needed for the rest of the compilation
2431 (such as changing which registers are fixed or caller-saved). But suppress
2432 writing any insns or setting DECL_RTL of anything in this case. */
2433
2434void
2435assign_parms (fndecl, second_time)
2436 tree fndecl;
2437 int second_time;
2438{
2439 register tree parm;
2440 register rtx entry_parm = 0;
2441 register rtx stack_parm = 0;
2442 CUMULATIVE_ARGS args_so_far;
2443 enum machine_mode passed_mode, nominal_mode;
2444 /* Total space needed so far for args on the stack,
2445 given as a constant and a tree-expression. */
2446 struct args_size stack_args_size;
2447 tree fntype = TREE_TYPE (fndecl);
2448 tree fnargs = DECL_ARGUMENTS (fndecl);
2449 /* This is used for the arg pointer when referring to stack args. */
2450 rtx internal_arg_pointer;
2451 /* This is a dummy PARM_DECL that we used for the function result if
2452 the function returns a structure. */
2453 tree function_result_decl = 0;
2454 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2455 int varargs_setup = 0;
2456
2457 /* Nonzero if the last arg is named `__builtin_va_alist',
2458 which is used on some machines for old-fashioned non-ANSI varargs.h;
2459 this should be stuck onto the stack as if it had arrived there. */
2460 int vararg
2461 = (fnargs
2462 && (parm = tree_last (fnargs)) != 0
2463 && DECL_NAME (parm)
2464 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2465 "__builtin_va_alist")));
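  /* (A sketch of how such a parameter arises with old-fashioned
     varargs.h: in a definition like
	 f (va_alist) va_dcl { ... }
     the va_alist macro expands to __builtin_va_alist.)  */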
2466
2467 /* Nonzero if function takes extra anonymous args.
2468 This means the last named arg must be on the stack
2469 right before the anonymous ones. */
2470 int stdarg
2471 = (TYPE_ARG_TYPES (fntype) != 0
2472 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2473 != void_type_node));
2474
2475 /* If the reg that the virtual arg pointer will be translated into is
2476 not a fixed reg or is the stack pointer, make a copy of the virtual
2477 arg pointer, and address parms via the copy. The frame pointer is
2478 considered fixed even though it is not marked as such.
2479
2480 The second time through, simply use ap to avoid generating rtx. */
2481
2482 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2483 || ! (fixed_regs[ARG_POINTER_REGNUM]
2484 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2485 && ! second_time)
2486 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2487 else
2488 internal_arg_pointer = virtual_incoming_args_rtx;
2489 current_function_internal_arg_pointer = internal_arg_pointer;
2490
2491 stack_args_size.constant = 0;
2492 stack_args_size.var = 0;
2493
2494 /* If struct value address is treated as the first argument, make it so. */
2495 if (aggregate_value_p (DECL_RESULT (fndecl))
2496 && ! current_function_returns_pcc_struct
2497 && struct_value_incoming_rtx == 0)
2498 {
2499 tree type = build_pointer_type (fntype);
2500
5f4f0e22 2501 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2502
2503 DECL_ARG_TYPE (function_result_decl) = type;
2504 TREE_CHAIN (function_result_decl) = fnargs;
2505 fnargs = function_result_decl;
2506 }
2507
2508 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2509 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2510
2511#ifdef INIT_CUMULATIVE_INCOMING_ARGS
5f4f0e22 2512 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
6f086dfc 2513#else
5f4f0e22 2514 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2515#endif
2516
2517 /* We haven't yet found an argument that we must push and pretend the
2518 caller did. */
2519 current_function_pretend_args_size = 0;
2520
2521 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2522 {
2523 int aggregate
2524 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2525 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2526 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2527 struct args_size stack_offset;
2528 struct args_size arg_size;
2529 int passed_pointer = 0;
2530 tree passed_type = DECL_ARG_TYPE (parm);
2531
2532	      /* Set LAST_NAMED if this is the last named arg before some
2533 anonymous args. We treat it as if it were anonymous too. */
2534 int last_named = ((TREE_CHAIN (parm) == 0
2535 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2536 && (vararg || stdarg));
2537
2538 if (TREE_TYPE (parm) == error_mark_node
2539 /* This can happen after weird syntax errors
2540 or if an enum type is defined among the parms. */
2541 || TREE_CODE (parm) != PARM_DECL
2542 || passed_type == NULL)
2543 {
2544 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2545 TREE_USED (parm) = 1;
2546 continue;
2547 }
2548
2549	      /* For a varargs.h function, save info about regs and stack space
2550 used by the individual args, not including the va_alist arg. */
2551 if (vararg && last_named)
2552 current_function_args_info = args_so_far;
2553
2554 /* Find mode of arg as it is passed, and mode of arg
2555 as it should be during execution of this function. */
2556 passed_mode = TYPE_MODE (passed_type);
2557 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2558
2559 /* If the parm's mode is VOID, its value doesn't matter,
2560 and avoid the usual things like emit_move_insn that could crash. */
2561 if (nominal_mode == VOIDmode)
2562 {
2563 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2564 continue;
2565 }
2566
2567#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2568 /* See if this arg was passed by invisible reference. */
2569 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2570 passed_type, ! last_named))
2571 {
2572 passed_type = build_pointer_type (passed_type);
2573 passed_pointer = 1;
2574 passed_mode = nominal_mode = Pmode;
2575 }
2576#endif
2577
2578 /* Let machine desc say which reg (if any) the parm arrives in.
2579 0 means it arrives on the stack. */
2580#ifdef FUNCTION_INCOMING_ARG
2581 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2582 passed_type, ! last_named);
2583#else
2584 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2585 passed_type, ! last_named);
2586#endif
2587
2588#ifdef SETUP_INCOMING_VARARGS
2589 /* If this is the last named parameter, do any required setup for
2590 varargs or stdargs. We need to know about the case of this being an
2591 addressable type, in which case we skip the registers it
2592 would have arrived in.
2593
2594 For stdargs, LAST_NAMED will be set for two parameters, the one that
2595 is actually the last named, and the dummy parameter. We only
2596 want to do this action once.
2597
2598 Also, indicate when RTL generation is to be suppressed. */
2599 if (last_named && !varargs_setup)
2600 {
2601 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2602 current_function_pretend_args_size,
2603 second_time);
2604 varargs_setup = 1;
2605 }
2606#endif
2607
2608 /* Determine parm's home in the stack,
2609 in case it arrives in the stack or we should pretend it did.
2610
2611 Compute the stack position and rtx where the argument arrives
2612 and its size.
2613
2614 There is one complexity here: If this was a parameter that would
2615 have been passed in registers, but wasn't only because it is
2616 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2617 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2618 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2619 0 as it was the previous time. */
2620
2621 locate_and_pad_parm (passed_mode, passed_type,
2622#ifdef STACK_PARMS_IN_REG_PARM_AREA
2623 1,
2624#else
2625#ifdef FUNCTION_INCOMING_ARG
2626 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2627 passed_type,
2628 (! last_named
2629 || varargs_setup)) != 0,
2630#else
2631 FUNCTION_ARG (args_so_far, passed_mode,
2632 passed_type,
2633 ! last_named || varargs_setup) != 0,
2634#endif
2635#endif
2636 fndecl, &stack_args_size, &stack_offset, &arg_size);
2637
2638 if (! second_time)
2639 {
2640 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2641
2642 if (offset_rtx == const0_rtx)
2643 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2644 else
2645 stack_parm = gen_rtx (MEM, passed_mode,
2646 gen_rtx (PLUS, Pmode,
2647 internal_arg_pointer, offset_rtx));
2648
2649 /* If this is a memory ref that contains aggregate components,
2650 mark it as such for cse and loop optimize. */
2651 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2652 }
2653
2654 /* If this parameter was passed both in registers and in the stack,
2655 use the copy on the stack. */
2656 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2657 entry_parm = 0;
2658
2659 /* If this parm was passed part in regs and part in memory,
2660 pretend it arrived entirely in memory
2661 by pushing the register-part onto the stack.
2662
2663 In the special case of a DImode or DFmode that is split,
2664 we could put it together in a pseudoreg directly,
2665 but for now that's not worth bothering with. */
2666
2667 if (entry_parm)
2668 {
2669 int nregs = 0;
2670#ifdef FUNCTION_ARG_PARTIAL_NREGS
2671 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2672 passed_type, ! last_named);
2673#endif
2674
2675 if (nregs > 0)
2676 {
2677 current_function_pretend_args_size
2678 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2679 / (PARM_BOUNDARY / BITS_PER_UNIT)
2680 * (PARM_BOUNDARY / BITS_PER_UNIT));
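	      /* E.g., with nregs == 3, UNITS_PER_WORD == 4 and
		 PARM_BOUNDARY == 64, this rounds 12 bytes up to 16.  */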
2681
2682 if (! second_time)
2683 move_block_from_reg (REGNO (entry_parm),
2684 validize_mem (stack_parm), nregs);
2685 entry_parm = stack_parm;
2686 }
2687 }
2688
2689 /* If we didn't decide this parm came in a register,
2690 by default it came on the stack. */
2691 if (entry_parm == 0)
2692 entry_parm = stack_parm;
2693
2694 /* Record permanently how this parm was passed. */
2695 if (! second_time)
2696 DECL_INCOMING_RTL (parm) = entry_parm;
2697
2698 /* If there is actually space on the stack for this parm,
2699 count it in stack_args_size; otherwise set stack_parm to 0
2700 to indicate there is no preallocated stack slot for the parm. */
2701
2702 if (entry_parm == stack_parm
d9ca49d5 2703#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 2704 /* On some machines, even if a parm value arrives in a register
2705 there is still an (uninitialized) stack slot allocated for it.
2706
2707 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2708 whether this parameter already has a stack slot allocated,
2709 because an arg block exists only if current_function_args_size
2710	 is larger than some threshold, and we haven't calculated that
2711 yet. So, for now, we just assume that stack slots never exist
2712 in this case. */
2713 || REG_PARM_STACK_SPACE (fndecl) > 0
2714#endif
2715 )
2716 {
2717 stack_args_size.constant += arg_size.constant;
2718 if (arg_size.var)
2719 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2720 }
2721 else
2722 /* No stack slot was pushed for this parm. */
2723 stack_parm = 0;
2724
2725 /* Update info on where next arg arrives in registers. */
2726
2727 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2728 passed_type, ! last_named);
2729
2730 /* If this is our second time through, we are done with this parm. */
2731 if (second_time)
2732 continue;
2733
2734 /* If we can't trust the parm stack slot to be aligned enough
2735 for its ultimate type, don't use that slot after entry.
2736 We'll make another stack slot, if we need one. */
2737 {
2738#ifdef FUNCTION_ARG_BOUNDARY
2739 int thisparm_boundary
2740 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2741#else
2742 int thisparm_boundary = PARM_BOUNDARY;
2743#endif
2744
2745 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2746 stack_parm = 0;
2747 }
2748
2749 /* Now adjust STACK_PARM to the mode and precise location
2750 where this parameter should live during execution,
2751 if we discover that it must live in the stack during execution.
2752 To make debuggers happier on big-endian machines, we store
2753 the value in the last bytes of the space available. */
2754
2755 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2756 && stack_parm != 0)
2757 {
2758 rtx offset_rtx;
2759
2760#if BYTES_BIG_ENDIAN
2761 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2762 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2763 - GET_MODE_SIZE (nominal_mode));
2764#endif
2765
2766 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2767 if (offset_rtx == const0_rtx)
2768 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2769 else
2770 stack_parm = gen_rtx (MEM, nominal_mode,
2771 gen_rtx (PLUS, Pmode,
2772 internal_arg_pointer, offset_rtx));
2773
2774 /* If this is a memory ref that contains aggregate components,
2775 mark it as such for cse and loop optimize. */
2776 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2777 }
2778
2779 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2780 in the mode in which it arrives.
2781 STACK_PARM is an RTX for a stack slot where the parameter can live
2782 during the function (in case we want to put it there).
2783 STACK_PARM is 0 if no stack slot was pushed for it.
2784
2785 Now output code if necessary to convert ENTRY_PARM to
2786 the type in which this function declares it,
2787 and store that result in an appropriate place,
2788 which may be a pseudo reg, may be STACK_PARM,
2789 or may be a local stack slot if STACK_PARM is 0.
2790
2791 Set DECL_RTL to that place. */
2792
2793 if (nominal_mode == BLKmode)
2794 {
2795 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2796 if (GET_CODE (entry_parm) == REG)
2797 {
2798 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2799 UNITS_PER_WORD);
2800
2801 /* Note that we will be storing an integral number of words.
2802 So we have to be careful to ensure that we allocate an
2803 integral number of words. We do this below in the
2804 assign_stack_local if space was not allocated in the argument
2805 list. If it was, this will not work if PARM_BOUNDARY is not
2806 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2807 if it becomes a problem. */
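	    /* (E.g., a 6-byte BLKmode parm with UNITS_PER_WORD == 4 gets
	       size_stored == 8, i.e. two full words.)  */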
2808
2809 if (stack_parm == 0)
2810 stack_parm
2811 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2812 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2813 abort ();
2814
2815 move_block_from_reg (REGNO (entry_parm),
2816 validize_mem (stack_parm),
2817 size_stored / UNITS_PER_WORD);
2818 }
2819 DECL_RTL (parm) = stack_parm;
2820 }
74bd77a8 2821 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 2822 && ! DECL_INLINE (fndecl))
2823 /* layout_decl may set this. */
2824 || TREE_ADDRESSABLE (parm)
2825 || TREE_SIDE_EFFECTS (parm)
2826 /* If -ffloat-store specified, don't put explicit
2827 float variables into registers. */
2828 || (flag_float_store
2829 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2830 /* Always assign pseudo to structure return or item passed
2831 by invisible reference. */
2832 || passed_pointer || parm == function_result_decl)
2833 {
2834 /* Store the parm in a pseudoregister during the function. */
2835 register rtx parmreg = gen_reg_rtx (nominal_mode);
2836
2837 REG_USERVAR_P (parmreg) = 1;
2838
2839 /* If this was an item that we received a pointer to, set DECL_RTL
2840 appropriately. */
2841 if (passed_pointer)
2842 {
2843 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2844 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2845 }
2846 else
2847 DECL_RTL (parm) = parmreg;
2848
2849 /* Copy the value into the register. */
2850 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
2851 {
2852 /* If ENTRY_PARM is a hard register, it might be in a register
2853 not valid for operating in its mode (e.g., an odd-numbered
2854 register for a DFmode). In that case, moves are the only
2855 thing valid, so we can't do a convert from there. This
2856 occurs when the calling sequence allow such misaligned
2857 usages. */
2858 if (GET_CODE (entry_parm) == REG
2859 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2860 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2861 GET_MODE (entry_parm)))
5f4f0e22 2862 convert_move (parmreg, copy_to_reg (entry_parm), 0);
2863 else
2864 convert_move (parmreg, validize_mem (entry_parm), 0);
2865 }
2866 else
2867 emit_move_insn (parmreg, validize_mem (entry_parm));
2868
2869 /* If we were passed a pointer but the actual value
2870 can safely live in a register, put it in one. */
16bae307 2871 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2872 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
2873 && ! DECL_INLINE (fndecl))
2874 /* layout_decl may set this. */
2875 || TREE_ADDRESSABLE (parm)
2876 || TREE_SIDE_EFFECTS (parm)
2877 /* If -ffloat-store specified, don't put explicit
2878 float variables into registers. */
2879 || (flag_float_store
2880 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
2881 {
2882 parmreg = gen_reg_rtx (nominal_mode);
2883 emit_move_insn (parmreg, DECL_RTL (parm));
2884 DECL_RTL (parm) = parmreg;
2885 }
2886
2887 /* In any case, record the parm's desired stack location
2888 in case we later discover it must live in the stack. */
2889 if (REGNO (parmreg) >= nparmregs)
2890 {
2891 rtx *new;
2892 nparmregs = REGNO (parmreg) + 5;
2893 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2894 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2895 parm_reg_stack_loc = new;
2896 }
2897 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2898
2899 /* Mark the register as eliminable if we did no conversion
2900 and it was copied from memory at a fixed offset,
2901 and the arg pointer was not copied to a pseudo-reg.
2902 If the arg pointer is a pseudo reg or the offset formed
2903 an invalid address, such memory-equivalences
2904 as we make here would screw up life analysis for it. */
2905 if (nominal_mode == passed_mode
2906 && GET_CODE (entry_parm) == MEM
e16c591a 2907 && entry_parm == stack_parm
2908 && stack_offset.var == 0
2909 && reg_mentioned_p (virtual_incoming_args_rtx,
2910 XEXP (entry_parm, 0)))
2911 REG_NOTES (get_last_insn ())
2912 = gen_rtx (EXPR_LIST, REG_EQUIV,
2913 entry_parm, REG_NOTES (get_last_insn ()));
2914
2915 /* For pointer data type, suggest pointer register. */
2916 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2917 mark_reg_pointer (parmreg);
2918 }
2919 else
2920 {
2921 /* Value must be stored in the stack slot STACK_PARM
2922 during function execution. */
2923
2924 if (passed_mode != nominal_mode)
2925 {
2926 /* Conversion is required. */
2927 if (GET_CODE (entry_parm) == REG
2928 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2929 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2930 entry_parm = copy_to_reg (entry_parm);
2931
2932 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2933 }
2934
2935 if (entry_parm != stack_parm)
2936 {
2937 if (stack_parm == 0)
2938 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2939 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2940 emit_move_insn (validize_mem (stack_parm),
2941 validize_mem (entry_parm));
2942 }
2943
2944 DECL_RTL (parm) = stack_parm;
2945 }
2946
2947 /* If this "parameter" was the place where we are receiving the
2948 function's incoming structure pointer, set up the result. */
2949 if (parm == function_result_decl)
2950 DECL_RTL (DECL_RESULT (fndecl))
2951 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
2952
2953 if (TREE_THIS_VOLATILE (parm))
2954 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
2955 if (TREE_READONLY (parm))
2956 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
2957 }
2958
2959 max_parm_reg = max_reg_num ();
2960 last_parm_insn = get_last_insn ();
2961
2962 current_function_args_size = stack_args_size.constant;
2963
2964 /* Adjust function incoming argument size for alignment and
2965 minimum length. */
2966
2967#ifdef REG_PARM_STACK_SPACE
6f90e075 2968#ifndef MAYBE_REG_PARM_STACK_SPACE
2969 current_function_args_size = MAX (current_function_args_size,
2970 REG_PARM_STACK_SPACE (fndecl));
2971#endif
6f90e075 2972#endif
2973
2974#ifdef STACK_BOUNDARY
2975#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2976
2977 current_function_args_size
2978 = ((current_function_args_size + STACK_BYTES - 1)
2979 / STACK_BYTES) * STACK_BYTES;
2980#endif
2981
2982#ifdef ARGS_GROW_DOWNWARD
2983 current_function_arg_offset_rtx
5f4f0e22 2984 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
2985 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
2986 size_int (-stack_args_size.constant)),
5f4f0e22 2987 NULL_RTX, VOIDmode, 0));
2988#else
2989 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2990#endif
2991
2992 /* See how many bytes, if any, of its args a function should try to pop
2993 on return. */
2994
2995 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
2996 current_function_args_size);
2997
2998	  /* For a stdarg.h function, save info about regs and stack space
2999 used by the named args. */
3000
3001 if (stdarg)
3002 current_function_args_info = args_so_far;
3003
3004 /* Set the rtx used for the function return value. Put this in its
3005 own variable so any optimizers that need this information don't have
3006 to include tree.h. Do this here so it gets done when an inlined
3007 function gets output. */
3008
3009 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3010}
3011\f
3012/* Compute the size and offset from the start of the stacked arguments for a
3013 parm passed in mode PASSED_MODE and with type TYPE.
3014
3015 INITIAL_OFFSET_PTR points to the current offset into the stacked
3016 arguments.
3017
3018 The starting offset and size for this parm are returned in *OFFSET_PTR
3019 and *ARG_SIZE_PTR, respectively.
3020
3021 IN_REGS is non-zero if the argument will be passed in registers. It will
3022 never be set if REG_PARM_STACK_SPACE is not defined.
3023
3024 FNDECL is the function in which the argument was defined.
3025
3026 There are two types of rounding that are done. The first, controlled by
3027 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3028 list to be aligned to the specific boundary (in bits). This rounding
3029 affects the initial and starting offsets, but not the argument size.
3030
3031 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3032 optionally rounds the size of the parm to PARM_BOUNDARY. The
3033 initial offset is not affected by this rounding, while the size always
3034 is and the starting offset may be. */
3035
3036/* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
3037   initial_offset_ptr is positive because locate_and_pad_parm's
3038   callers pass in the total size of args so far as
3039   initial_offset_ptr.  arg_size_ptr is always positive.  */
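
/* A worked example (a sketch; assume PARM_BOUNDARY == 32, upward
   padding, and no PUSH_ROUNDING): a 1-byte QImode arg has its size
   rounded up from 1 to 4, so *arg_size_ptr ends up 4 bytes while
   *offset_ptr keeps the aligned starting offset.  */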
3040
3041 static void pad_to_arg_alignment (), pad_below ();
static tree round_down ();
3042
3043void
3044locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3045 initial_offset_ptr, offset_ptr, arg_size_ptr)
3046 enum machine_mode passed_mode;
3047 tree type;
3048 int in_regs;
3049 tree fndecl;
3050 struct args_size *initial_offset_ptr;
3051 struct args_size *offset_ptr;
3052 struct args_size *arg_size_ptr;
3053{
3054 tree sizetree
3055 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3056 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3057 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3058 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3059 int reg_parm_stack_space = 0;
3060
3061#ifdef REG_PARM_STACK_SPACE
3062 /* If we have found a stack parm before we reach the end of the
3063 area reserved for registers, skip that area. */
3064 if (! in_regs)
3065 {
3066#ifdef MAYBE_REG_PARM_STACK_SPACE
3067 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3068#else
6f086dfc 3069 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
29008b51 3070#endif
3071 if (reg_parm_stack_space > 0)
3072 {
3073 if (initial_offset_ptr->var)
3074 {
3075 initial_offset_ptr->var
3076 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3077 size_int (reg_parm_stack_space));
3078 initial_offset_ptr->constant = 0;
3079 }
3080 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3081 initial_offset_ptr->constant = reg_parm_stack_space;
3082 }
3083 }
3084#endif /* REG_PARM_STACK_SPACE */
3085
3086 arg_size_ptr->var = 0;
3087 arg_size_ptr->constant = 0;
3088
3089#ifdef ARGS_GROW_DOWNWARD
3090 if (initial_offset_ptr->var)
3091 {
3092 offset_ptr->constant = 0;
3093 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3094 initial_offset_ptr->var);
3095 }
3096 else
3097 {
3098 offset_ptr->constant = - initial_offset_ptr->constant;
3099 offset_ptr->var = 0;
3100 }
3101 if (where_pad == upward
3102 && (TREE_CODE (sizetree) != INTEGER_CST
3103 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3104 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3105 SUB_PARM_SIZE (*offset_ptr, sizetree);
3106 pad_to_arg_alignment (offset_ptr, boundary);
3107 if (initial_offset_ptr->var)
3108 {
3109 arg_size_ptr->var = size_binop (MINUS_EXPR,
3110 size_binop (MINUS_EXPR,
3111 integer_zero_node,
3112 initial_offset_ptr->var),
3113 offset_ptr->var);
3114 }
3115 else
3116 {
3117 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3118 offset_ptr->constant);
3119 }
3120/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3121 if (where_pad == downward)
3122 pad_below (arg_size_ptr, passed_mode, sizetree);
3123#else /* !ARGS_GROW_DOWNWARD */
3124 pad_to_arg_alignment (initial_offset_ptr, boundary);
3125 *offset_ptr = *initial_offset_ptr;
3126 if (where_pad == downward)
3127 pad_below (offset_ptr, passed_mode, sizetree);
3128
3129#ifdef PUSH_ROUNDING
3130 if (passed_mode != BLKmode)
3131 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3132#endif
3133
3134 if (where_pad != none
3135 && (TREE_CODE (sizetree) != INTEGER_CST
3136 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3137 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3138
3139 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3140#endif /* ARGS_GROW_DOWNWARD */
3141}
3142
3143/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3144 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
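/* E.g., with BOUNDARY == 64 a constant offset of 20 is rounded up to 24
   (CEIL_ROUND); in the ARGS_GROW_DOWNWARD case -20 is rounded down to
   -24 (FLOOR_ROUND).  */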
3145
3146static void
3147pad_to_arg_alignment (offset_ptr, boundary)
3148 struct args_size *offset_ptr;
3149 int boundary;
3150{
3151 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3152
3153 if (boundary > BITS_PER_UNIT)
3154 {
3155 if (offset_ptr->var)
3156 {
3157 offset_ptr->var =
3158#ifdef ARGS_GROW_DOWNWARD
3159 round_down
3160#else
3161 round_up
3162#endif
3163 (ARGS_SIZE_TREE (*offset_ptr),
3164 boundary / BITS_PER_UNIT);
3165 offset_ptr->constant = 0; /*?*/
3166 }
3167 else
3168 offset_ptr->constant =
3169#ifdef ARGS_GROW_DOWNWARD
3170 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3171#else
3172 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3173#endif
3174 }
3175}
3176
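/* Adjust the offset in *OFFSET_PTR to step over the padding placed below
   a downward-padded parm of mode PASSED_MODE and size SIZETREE: the
   difference between the size rounded up to PARM_BOUNDARY and the
   actual size.  */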
3177static void
3178pad_below (offset_ptr, passed_mode, sizetree)
3179 struct args_size *offset_ptr;
3180 enum machine_mode passed_mode;
3181 tree sizetree;
3182{
3183 if (passed_mode != BLKmode)
3184 {
3185 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3186 offset_ptr->constant
3187 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3188 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3189 - GET_MODE_SIZE (passed_mode));
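	 /* E.g., an HImode parm with PARM_BOUNDARY == 32 advances the
	    offset by 4 - 2 == 2 bytes of padding below the data.  */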
3190 }
3191 else
3192 {
3193 if (TREE_CODE (sizetree) != INTEGER_CST
3194 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3195 {
3196 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3197 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3198 /* Add it in. */
3199 ADD_PARM_SIZE (*offset_ptr, s2);
3200 SUB_PARM_SIZE (*offset_ptr, sizetree);
3201 }
3202 }
3203}
3204
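/* Return VALUE, a tree, rounded down to the nearest multiple of DIVISOR,
   computed as (VALUE / DIVISOR) * DIVISOR in size arithmetic.  */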
3205static tree
3206round_down (value, divisor)
3207 tree value;
3208 int divisor;
3209{
3210 return size_binop (MULT_EXPR,
3211 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3212 size_int (divisor));
3213}
3214\f
3215/* Walk the tree of blocks describing the binding levels within a function
3216 and warn about uninitialized variables.
3217 This is done after calling flow_analysis and before global_alloc
3218 clobbers the pseudo-regs to hard regs. */
3219
3220void
3221uninitialized_vars_warning (block)
3222 tree block;
3223{
3224 register tree decl, sub;
3225 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3226 {
3227 if (TREE_CODE (decl) == VAR_DECL
3228	 /* These warnings are unreliable for aggregates
3229 because assigning the fields one by one can fail to convince
3230 flow.c that the entire aggregate was initialized.
3231 Unions are troublesome because members may be shorter. */
3232 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3233 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3234 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3235 && DECL_RTL (decl) != 0
3236 && GET_CODE (DECL_RTL (decl)) == REG
3237 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3238 warning_with_decl (decl,
3239 "`%s' may be used uninitialized in this function");
3240 if (TREE_CODE (decl) == VAR_DECL
3241 && DECL_RTL (decl) != 0
3242 && GET_CODE (DECL_RTL (decl)) == REG
3243 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3244 warning_with_decl (decl,
3245 "variable `%s' may be clobbered by `longjmp'");
3246 }
3247 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3248 uninitialized_vars_warning (sub);
3249}
3250
3251/* Do the appropriate part of uninitialized_vars_warning
3252 but for arguments instead of local variables. */
3253
3254void
3255setjmp_args_warning (block)
3256 tree block;
3257{
3258 register tree decl;
3259 for (decl = DECL_ARGUMENTS (current_function_decl);
3260 decl; decl = TREE_CHAIN (decl))
3261 if (DECL_RTL (decl) != 0
3262 && GET_CODE (DECL_RTL (decl)) == REG
3263 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3264 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3265}
3266
3267 /* If this function calls setjmp, put all vars into the stack
3268 unless they were declared `register'. */
3269
3270void
3271setjmp_protect (block)
3272 tree block;
3273{
3274 register tree decl, sub;
3275 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3276 if ((TREE_CODE (decl) == VAR_DECL
3277 || TREE_CODE (decl) == PARM_DECL)
3278 && DECL_RTL (decl) != 0
3279 && GET_CODE (DECL_RTL (decl)) == REG
3280 /* If this variable came from an inline function, it must be
3281	 that its life doesn't overlap the setjmp.  If there was a
3282	 setjmp in the function, it would already be in memory.  We
3283	 must exclude such variables because their DECL_RTL might be
3284 set to strange things such as virtual_stack_vars_rtx. */
3285 && ! DECL_FROM_INLINE (decl)
3286 && (
3287#ifdef NON_SAVING_SETJMP
3288 /* If longjmp doesn't restore the registers,
3289 don't put anything in them. */
3290 NON_SAVING_SETJMP
3291 ||
3292#endif
a82ad570 3293 ! DECL_REGISTER (decl)))
3294 put_var_into_stack (decl);
3295 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3296 setjmp_protect (sub);
3297}
3298\f
3299/* Like the previous function, but for args instead of local variables. */
3300
3301void
3302setjmp_protect_args ()
3303{
3304 register tree decl, sub;
3305 for (decl = DECL_ARGUMENTS (current_function_decl);
3306 decl; decl = TREE_CHAIN (decl))
3307 if ((TREE_CODE (decl) == VAR_DECL
3308 || TREE_CODE (decl) == PARM_DECL)
3309 && DECL_RTL (decl) != 0
3310 && GET_CODE (DECL_RTL (decl)) == REG
3311 && (
3312 /* If longjmp doesn't restore the registers,
3313 don't put anything in them. */
3314#ifdef NON_SAVING_SETJMP
3315 NON_SAVING_SETJMP
3316 ||
3317#endif
a82ad570 3318 ! DECL_REGISTER (decl)))
3319 put_var_into_stack (decl);
3320}
\f
/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0)
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
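
/* Illustrative sketch, not part of the compiler: the construct that makes
   a static chain necessary.  With GNU C nested functions, the inner
   function refers to a local of the outer one:

	int
	outer (x)
	     int x;
	{
	  int inner () { return x; }
	  return inner ();
	}

   While expanding INNER, decl_function_context of X yields OUTER, and
   lookup_static_chain returns the rtx (recorded in context_display)
   through which OUTER's frame, and hence X, can be addressed.  */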
\f
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
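
/* Illustrative sketch, not part of the compiler: the rewriting done by
   fix_lexical_addr, in schematic form.  An address BASE + DISP valid in
   the containing function keeps its displacement and only swaps bases;
   assuming the stack-variables case and a displacement of 8:

	(plus (reg outer-frame-pointer) (const_int 8))
	  ==>  plus_constant (base-from-context-display, 8)

   In the arg-pointer case the displacement may additionally be adjusted
   by FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET, as in the #else branch
   above.  */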
\f
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
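
/* Illustrative sketch, not part of the compiler: what forces a trampoline.
   Taking the address of a nested function yields a pointer callable like
   any other, so the pointer must carry the static chain with it:

	void
	apply (fn, n)
	     void (*fn) ();
	     int n;
	{
	  (*fn) (n);
	}

	void
	outer (x)
	     int x;
	{
	  void inner (n) int n; { x += n; }
	  apply (inner, 5);
	}

   Passing INNER converts it to a pointer; trampoline_address then
   allocates space in OUTER's frame for a small code stub that loads the
   static chain and jumps to INNER's real code.  */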

/* Given a trampoline address,
   round it to a multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
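
/* Illustrative sketch, not part of the compiler: the add-and-mask rounding
   emitted above, on plain integers.  Assuming TRAMPOLINE_ALIGNMENT is a
   power of two, say 16, the two binops compute

	(tramp + 15) & -16

   which rounds TRAMP up to the next multiple of 16: 0x1001 + 15 = 0x1010
   and 0x1010 & -16 = 0x1010, while an already-aligned 0x1000 is left
   unchanged.  This is the CEIL_ROUND computation done in RTL rather than
   at compile time, because TRAMP is known only at run time.  */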
\f
/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */

static int all_blocks ();
static tree blocks_nreverse ();

/* Put all this function's BLOCK nodes into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are TOP_BLOCK, the top-level block of the function,
   and INSNS, the insn chain of the function.  */

tree *
identify_blocks (top_block, insns)
     tree top_block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  int next_block_number = 0;
  int current_block_number = 0;
  rtx insn;

  if (top_block == 0)
    return 0;

  n_blocks = all_blocks (top_block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (top_block, block_vector);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_stack[depth++] = current_block_number;
	    current_block_number = next_block_number;
	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    current_block_number = block_stack[--depth];
	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
	  }
      }

  return block_vector;
}

/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below TOP_BLOCK.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, top_block, insns)
     tree *block_vector;
     tree top_block;
     rtx insns;
{
  tree current_block = top_block;
  rtx insn;

  if (block_vector == 0)
    return top_block;

  /* Prune the old tree away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  return current_block;
}
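
/* Illustrative sketch, not part of the compiler: the calling protocol for
   the two functions above, as an optimization pass might use it.  The
   name shuffle_rtl here is hypothetical:

	tree *map = identify_blocks (top_block, get_insns ());
	shuffle_rtl ();
	top_block = reorder_blocks (map, top_block, get_insns ());
	free (map);

   identify_blocks numbers the block notes before the RTL changes;
   reorder_blocks rebuilds the BLOCK tree from those notes afterward.  */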

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}

/* Count the blocks in the subtree rooted at BLOCK (including BLOCK itself),
   and list them all into the vector VECTOR.
   Also clear TREE_ASM_WRITTEN in all blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 1;
  tree subblocks;

  TREE_ASM_WRITTEN (block) = 0;
  /* Record this block.  */
  if (vector)
    vector[0] = block;

  /* Record the subblocks, and their subblocks.  */
  for (subblocks = BLOCK_SUBBLOCKS (block);
       subblocks; subblocks = BLOCK_CHAIN (subblocks))
    n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);

  return n_blocks;
}
\f
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  char *junk;

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */
  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	current_function_returns_pcc_struct = 1;
      else
#endif
	current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}

/* Expand a call to __main at the beginning of a possible main function.  */

void
expand_main_function ()
{
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
		     VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
}
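
/* Illustrative sketch, not part of the compiler: the effect of the call
   emitted above.  On a target without INIT_SECTION_ASM_OP, compiling
   a main function behaves as if the user had written:

	extern void __main ();

	int
	main (argc, argv)
	     int argc;
	     char **argv;
	{
	  __main ();
	  ... rest of main ...
	}

   where __main is supplied by the runtime (e.g. libgcc) and is how
   global constructors get run before user code on such targets.  */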
\f
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	DECL_RTL (DECL_RESULT (subr))
	  = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
		     value_address);
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    /* If function will end with cleanup code for parms,
       compute the return value into a pseudo reg,
       which we will copy into the true return register
       after the cleanups are done.  */
    DECL_RTL (DECL_RESULT (subr))
      = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  /* Fetch static chain values for containing functions.  */
  tem = decl_function_context (current_function_decl);
  /* If not doing stupid register allocation, then start off with the static
     chain pointer in a pseudo register.  Otherwise, we use the stack
     address that was generated above.  */
  if (tem && ! obey_regdecls)
    last_ptr = copy_to_reg (static_chain_incoming_rtx);
  context_display = 0;
  while (tem)
    {
      tree rtlexp = make_node (RTL_EXPR);

      RTL_EXPR_RTL (rtlexp) = last_ptr;
      context_display = tree_cons (tem, rtlexp, context_display);
      tem = decl_function_context (tem);
      if (tem == 0)
	break;
      /* Chain through stack frames, assuming pointer to next lexical frame
	 is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
      last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
      last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
				       memory_address (Pmode, last_ptr)));
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.  */

/* It is up to language-specific callers to do cleanups for parameters.  */

void
expand_function_end (filename, line)
     char *filename;
     int line;
{
  register int i;
  tree link;

  static rtx initial_trampoline;

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      setjmp_protect (DECL_INITIAL (current_function_decl));
      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
			      round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
		       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
			     XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* Clear the trampoline_list for the next function.  */
  trampoline_list = 0;

#if 0 /* I think unused parms are legitimate enough.  */
  /* Warn about unused parms.  */
  if (warn_unused)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
	  warning_with_decl (decl, "unused parameter `%s'");
    }
#endif

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
}
\f
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}
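
/* Illustrative sketch, not part of the compiler: how the two helpers above
   cooperate.  record_insns snapshots the UIDs of the prologue (or epilogue)
   insns; later passes may copy or reorder insns, so prologue insns are
   found again by UID rather than by pointer:

	prologue = record_insns (seq);
	...
	for (insn = f; insn; insn = NEXT_INSN (insn))
	  if (contains (insn, prologue))
	    ... INSN, or part of its SEQUENCE, is prologue code ...

   contains returns a count so that a delay-slot SEQUENCE holding several
   prologue insns is accounted for correctly.  */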

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
	 if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
	  && GET_CODE (basic_block_head[0]) != CODE_LABEL)
	basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  rtx tail, seq;

	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
	     the epilogue insns (this must include the jump insn that
	     returns), USE insns at the end of a function, and a BARRIER.  */

	  emit_barrier_after (insn);

	  /* Place the epilogue before the USE insns at the end of a
	     function.  */
	  while (prev
		 && GET_CODE (prev) == INSN
		 && GET_CODE (PATTERN (prev)) == USE)
	    {
	      insn = PREV_INSN (prev);
	      prev = prev_nonnote_insn (prev);
	    }

	  seq = gen_epilogue ();
	  tail = emit_jump_insn_after (seq, insn);
	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

	  /* Include the new epilogue insns in the last block.  Ignore
	     them if they form a basic block unto themselves.  */
	  if (basic_block_end && n_basic_blocks
	      && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
	    basic_block_end[n_basic_blocks - 1] = tail;

	  /* Retain a map of the epilogue insns.  */
	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  return;
	}
    }
#endif
  epilogue = 0;
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; insn; insn = NEXT_INSN (insn))
	    if (GET_CODE (insn) == NOTE)
	      {
		if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		  note = insn;
	      }
	    else if ((len -= contains (insn, prologue)) == 0)
	      {
		/* Find the prologue-end note if we haven't already, and
		   move it to just after the last prologue insn.  */
		if (note == 0)
		  for (note = insn; note = NEXT_INSN (note);)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
		      break;
		next = NEXT_INSN (note);
		prev = PREV_INSN (note);
		if (prev)
		  NEXT_INSN (prev) = next;
		if (next)
		  PREV_INSN (next) = prev;
		add_insn_after (note, insn);
		break;
	      }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	    if (GET_CODE (insn) == NOTE)
	      {
		if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		  note = insn;
	      }
	    else if ((len -= contains (insn, epilogue)) == 0)
	      {
		/* Find the epilogue-begin note if we haven't already, and
		   move it to just before the first epilogue insn.  */
		if (note == 0)
		  for (note = insn; note = PREV_INSN (note);)
		    if (GET_CODE (note) == NOTE
			&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
		      break;
		next = NEXT_INSN (note);
		prev = PREV_INSN (note);
		if (prev)
		  NEXT_INSN (prev) = next;
		if (next)
		  PREV_INSN (next) = prev;
		add_insn_after (note, PREV_INSN (insn));
		break;
	      }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}