/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"

/* Round a value down to the nearest multiple of the required alignment,
   i.e. to the largest such multiple that does not exceed it.  Avoid using
   division in case the value is negative.  Assume the alignment is a
   power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
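
/* A worked example of the two macros above (illustrative only; not part
   of the original sources).  With ALIGN == 8:

        FLOOR_ROUND (-13, 8) == -13 & ~7        == -16
        CEIL_ROUND (13, 8)   == (13 + 7) & ~7   ==  16

   Masking with ~(ALIGN - 1) clears the low-order bits, which rounds
   toward minus infinity even for negative values, where C division
   would round toward zero.  An exact multiple is left unchanged.  */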

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0 /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this
   function and the return value of a function called by this function.
   This helps integrate.c.  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will
     use, use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
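
/* Illustrative use of assign_stack_local (a sketch, not from the original
   sources): allocating a word-sized slot with the default alignment for
   its mode, e.g. when a pseudo-register must be spilled to memory:

        rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Until virtual registers are instantiated, the returned MEM is addressed
   relative to virtual_stack_vars_rtx; afterwards, new slots are addressed
   directly from frame_pointer_rtx as shown above.  */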

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will
     use, use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
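
/* A sketch of the expected calling pattern (illustrative, not from the
   original sources).  Code that needs scratch stack space for a single
   statement brackets its work in a temporary nesting level:

        push_temp_slots ();
        temp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
        ... emit insns that use TEMP ...
        free_temp_slots ();     (TEMP may now be reused)
        pop_temp_slots ();

   Passing KEEP == 1 instead lets the slot survive free_temp_slots, as is
   done for the automatic variables of a block.  */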
\f
/* If X could be a reference to a temporary slot, mark that slot as belonging
   to the level one higher.  If X matched one of our slots, just mark that
   one.  Otherwise, we can't easily predict which it is, so upgrade all of
   them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)
      {
        p->level--;
        return;
      }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->in_use = 0;
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  /* For GNU C++, we must allow a sequence to be emitted anywhere in
     the level where the sequence was started.  By not changing levels
     when the compiler is inside a sequence, the temporaries for the
     sequence will not unwittingly conflict with the temporaries for
     other sequences and/or code at that level.  */
  if (in_sequence_p ())
    return;

  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  /* See comment in push_temp_slots about why we don't change levels
     in sequences.  */
  if (in_sequence_p ())
    return;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level)
      p->in_use = 0;

  temp_slot_level--;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (GET_MODE (reg),
                                        GET_MODE_SIZE (GET_MODE (reg)),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (GET_MODE (reg),
                                  GET_MODE_SIZE (GET_MODE (reg)),
                                  0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its
         function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);
      temp
        = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
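
/* An illustrative before/after for put_var_into_stack (not from the
   original sources).  If DECL lived in pseudo-register 57:

        before:  DECL_RTL == (reg:SI 57)
        after:   DECL_RTL == (mem:SI (plus:SI (reg virtual-stack-vars)
                                              (const_int OFFSET)))

   where OFFSET is whatever assign_stack_local chose.  Because the REG rtx
   is rewritten in place with PUT_CODE, every insn sharing that rtx sees
   the MEM at once; fixup_var_refs then repairs any insn the change made
   unrecognizable.  */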
\f
static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* REPLACEMENTS is a pointer to a list of the above structures and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
          || GET_CODE (insn) == JUMP_INSN)
        {
          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          if (toplevel
              && GET_CODE (PATTERN (insn)) == SET
              && SET_DEST (PATTERN (insn)) == var
              && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              next = delete_insn (insn);
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a
                 single pseudo for the entire insn in case there is a
                 MATCH_DUP between two operands.  We pass a pointer to the
                 head of a list of struct fixup_replacements.  If
                 fixup_var_refs_1 needs to allocate pseudos or replacement
                 MEMs (for SUBREGs), it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              struct fixup_replacement *replacements = 0;

              fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      /* We can not separate USE insns from the CALL_INSN
                         that they belong to.  If this is a CALL_INSN, insert
                         the move insn before the USE insns preceding it
                         instead of immediately before the insn.  */
                      if (GET_CODE (insn) == CALL_INSN)
                        {
                          insert_before = insn;
                          while (GET_CODE (PREV_INSN (insert_before)) == INSN
                                 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
                            insert_before = PREV_INSN (insert_before);
                        }
                      else
                        insert_before = insn;

                      emit_insn_before (gen_move_insn (replacements->new,
                                                       replacements->old),
                                        insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register.  See if the rtx expression
   at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we
   need to modify this insn by replacing a memory reference with a pseudo
   or by making a new MEM to implement a SUBREG, we consult that list to
   see if we have already chosen a replacement.  If none has already been
   allocated, we allocate it and update the list.  fixup_var_refs_insns
   will copy VAR or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, loc, insn, replacements)
     register rtx var;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register, we can leave things
             the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            tem = fixup_memory_subreg (tem, insn, 1);
          tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper
             mode for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  int offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                  offset = (GET_MODE_SIZE (is_mode)
                            - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                  pos %= GET_MODE_BITSIZE (wanted_mode);

                  newmem = gen_rtx (MEM, wanted_mode,
                                    plus_constant (XEXP (tem, 0), offset));
                  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                  /* Make the change and see if the insn remains valid.  */
                  INSN_CODE (insn) = -1;
                  XEXP (x, 0) = newmem;
                  XEXP (x, 2) = GEN_INT (pos);

                  if (recog_memoized (insn) >= 0)
                    return;

                  /* Otherwise, restore old position.  XEXP (x, 0) will be
                     restored later.  */
                  XEXP (x, 2) = old_pos;
                }
            }

          /* If we get here, the bitfield extract insn can't accept a memory
             reference.  Copy the input into a register.  */

          tem1 = gen_reg_rtx (GET_MODE (tem));
          emit_insn_before (gen_move_insn (tem1, tem), insn);
          XEXP (x, 0) = tem1;
          return;
        }
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
        {
          /* If this SUBREG makes VAR wider, it has become a paradoxical
             SUBREG with VAR in memory, but these aren't allowed at this
             stage of the compilation.  So load VAR into a pseudo and take
             a SUBREG of that pseudo.  */
          if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
            {
              replacement = find_replacement (replacements, var);
              if (replacement->new == 0)
                replacement->new = gen_reg_rtx (GET_MODE (var));
              SUBREG_REG (x) = replacement->new;
              return;
            }

          /* See if we have already found a replacement for this SUBREG.
             If so, use it.  Otherwise, make a MEM and see if the insn
             is recognized.  If not, or if we should force MEM into a
             register, make a pseudo for this SUBREG.  */
          replacement = find_replacement (replacements, x);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
          || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
          || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
         insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
          && SUBREG_REG (SET_DEST (x)) == var
          && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
              > GET_MODE_SIZE (GET_MODE (var))))
        {
          SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
          emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
                                                            tem)),
                           insn);
          break;
        }

      {
        rtx dest = SET_DEST (x);
        rtx src = SET_SRC (x);
        rtx outerdest = dest;

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == SIGN_EXTRACT
               || GET_CODE (dest) == ZERO_EXTRACT)
          dest = XEXP (dest, 0);

        if (GET_CODE (src) == SUBREG)
          src = XEXP (src, 0);

        /* If VAR does not appear at the top level of the SET
           just scan the lower levels of the tree.  */

        if (src != var && dest != var)
          break;

        /* We will need to rerecognize this insn.  */
        INSN_CODE (insn) = -1;

#ifdef HAVE_insv
        if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
          {
            /* Since this case will return, ensure we fixup all the
               operands here.  */
            fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
            fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
            fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);

            tem = XEXP (outerdest, 0);

            /* Clean up (SUBREG:SI (MEM:mode ...) 0)
               that may appear inside a ZERO_EXTRACT.
               This was legitimate when the MEM was a REG.  */
            if (GET_CODE (tem) == SUBREG
                && SUBREG_REG (tem) == var)
              tem = fixup_memory_subreg (tem, insn, 1);
            else
              tem = fixup_stack_1 (tem, insn);

            if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
                && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
                && ! mode_dependent_address_p (XEXP (tem, 0))
                && ! MEM_VOLATILE_P (tem))
              {
                enum machine_mode wanted_mode
                  = insn_operand_mode[(int) CODE_FOR_insv][0];
                enum machine_mode is_mode = GET_MODE (tem);
                int width = INTVAL (XEXP (outerdest, 1));
                int pos = INTVAL (XEXP (outerdest, 2));

                /* If we have a narrower mode, we can do something.  */
                if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                  {
                    int offset = pos / BITS_PER_UNIT;
                    rtx old_pos = XEXP (outerdest, 2);
                    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                    offset = (GET_MODE_SIZE (is_mode)
                              - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                    pos %= GET_MODE_BITSIZE (wanted_mode);

                    newmem = gen_rtx (MEM, wanted_mode,
                                      plus_constant (XEXP (tem, 0), offset));
                    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                    /* Make the change and see if the insn remains valid.  */
                    INSN_CODE (insn) = -1;
                    XEXP (outerdest, 0) = newmem;
                    XEXP (outerdest, 2) = GEN_INT (pos);

                    if (recog_memoized (insn) >= 0)
                      return;

                    /* Otherwise, restore old position.  XEXP (x, 0) will be
                       restored later.  */
                    XEXP (outerdest, 2) = old_pos;
                  }
              }

            /* If we get here, the bit-field store doesn't allow memory
               or isn't located at a constant position.  Load the value into
               a register, do the store, and put it back into memory.  */

            tem1 = gen_reg_rtx (GET_MODE (tem));
            emit_insn_before (gen_move_insn (tem1, tem), insn);
            emit_insn_after (gen_move_insn (tem, tem1), insn);
            XEXP (outerdest, 0) = tem1;
            return;
          }
#endif

        /* STRICT_LOW_PART is a no-op on memory references
           and it can cause combinations to be unrecognizable,
           so eliminate it.  */

        if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
          SET_DEST (x) = XEXP (SET_DEST (x), 0);

        /* A valid insn to copy VAR into or out of a register
           must be left alone, to avoid an infinite loop here.
           If the reference to VAR is by a subreg, fix that up,
           since SUBREG is not valid for a memref.
           Also fix up the address of the stack slot.  */

        if ((SET_SRC (x) == var
             || (GET_CODE (SET_SRC (x)) == SUBREG
                 && SUBREG_REG (SET_SRC (x)) == var))
            && (GET_CODE (SET_DEST (x)) == REG
                || (GET_CODE (SET_DEST (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            replacement = find_replacement (replacements, SET_SRC (x));
            if (replacement->new)
              {
                SET_SRC (x) = replacement->new;
                return;
              }
            else if (GET_CODE (SET_SRC (x)) == SUBREG)
              SET_SRC (x) = replacement->new
                = fixup_memory_subreg (SET_SRC (x), insn, 0);
            else
              SET_SRC (x) = replacement->new
                = fixup_stack_1 (SET_SRC (x), insn);
            return;
          }

        if ((SET_DEST (x) == var
             || (GET_CODE (SET_DEST (x)) == SUBREG
                 && SUBREG_REG (SET_DEST (x)) == var))
            && (GET_CODE (SET_SRC (x)) == REG
                || (GET_CODE (SET_SRC (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            if (GET_CODE (SET_DEST (x)) == SUBREG)
              SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
            else
              SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
            return;
          }

        /* Otherwise, storing into VAR must be handled specially
           by storing into a temporary and copying that into VAR
           with a new insn after this one.  */

        if (dest == var)
          {
            rtx temp;
            rtx fixeddest;
            tem = SET_DEST (x);
            /* STRICT_LOW_PART can be discarded, around a MEM.  */
            if (GET_CODE (tem) == STRICT_LOW_PART)
              tem = XEXP (tem, 0);
            /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
            if (GET_CODE (tem) == SUBREG)
              fixeddest = fixup_memory_subreg (tem, insn, 0);
            else
              fixeddest = fixup_stack_1 (tem, insn);

            temp = gen_reg_rtx (GET_MODE (tem));
            emit_insn_after (gen_move_insn (fixeddest, temp), insn);
            SET_DEST (x) = temp;
          }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
        }
    }
}
\f
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

#if BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
             - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
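
/* An illustrative transformation (not from the original sources), on a
   little-endian target with 4-byte words:

        (subreg:SI (mem:DI (reg A)) 1)
        ==> (mem:SI (plus (reg A) (const_int 4)))

   SUBREG_WORD is scaled by UNITS_PER_WORD and folded into the address;
   on big-endian targets the offset is first corrected as shown above.  */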

/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
walk_fixup_memory_subreg (x, insn)
     register rtx x;
     rtx insn;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, 0);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j)
              = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
            || GET_CODE (insn) == JUMP_INSN)
          fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif

/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
         (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
          && GET_CODE (XEXP (ad, 0)) == REG
          && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
          && GET_CODE (XEXP (ad, 1)) == CONST_INT)
        {
          rtx temp, seq;
          if (memory_address_p (GET_MODE (x), ad))
            return x;

          start_sequence ();
          temp = copy_to_reg (ad);
          seq = gen_sequence ();
          end_sequence ();
          emit_insn_before (seq, insn);
          return change_address (x, VOIDmode, temp);
        }
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
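
/* Illustrative effect of fixup_stack_1 (not from the original sources),
   for a machine whose addressing modes cannot encode a large displacement:

        (mem:SI (plus (reg virtual-stack-vars) (const_int 40000)))

   becomes

        (set (reg T) (plus (reg virtual-stack-vars) (const_int 40000)))
        (mem:SI (reg T))

   where T is a fresh pseudo and the new addition is emitted before the
   insn containing the reference.  */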
\f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
          != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
         and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
        memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
               && equiv_mem != 0)
        memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
               && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
        memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
               && equiv_mem != 0
               && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
        memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
          && ! mode_dependent_address_p (XEXP (memref, 0))
          && ! MEM_VOLATILE_P (memref))
        {
          /* Now adjust the address, first for any subreg'ing
             that we are now getting rid of,
             and then for which byte of the word is wanted.  */

          register int offset = INTVAL (XEXP (bitfield, 2));
          /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
          offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
                    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
          /* Adjust OFFSET to count bytes from low-address byte.  */
          offset /= BITS_PER_UNIT;
          if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
            {
              offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
              offset -= (MIN (UNITS_PER_WORD,
                              GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
                         - MIN (UNITS_PER_WORD,
                                GET_MODE_SIZE (GET_MODE (memref))));
#endif
            }

          memref = change_address (memref, mode,
                                   plus_constant (XEXP (memref, 0), offset));

          /* Store this memory reference where
             we found the bit field reference.  */

          if (destflag)
            {
              validate_change (insn, &SET_DEST (body), memref, 1);
              if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
                {
                  rtx src = SET_SRC (body);
                  while (GET_CODE (src) == SUBREG
                         && SUBREG_WORD (src) == 0)
                    src = SUBREG_REG (src);
                  if (GET_MODE (src) != GET_MODE (memref))
                    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
                  validate_change (insn, &SET_SRC (body), src, 1);
                }
              else if (GET_MODE (SET_SRC (body)) != VOIDmode
                       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
                /* This shouldn't happen because anything that didn't have
                   one of these modes should have got converted explicitly
                   and then referenced through a subreg.
                   This is so because the original bit-field was
                   handled by agg_mode and so its tree structure had
                   the same mode that memref now has.  */
                abort ();
            }
          else
            {
              rtx dest = SET_DEST (body);

              while (GET_CODE (dest) == SUBREG
                     && SUBREG_WORD (dest) == 0)
                dest = SUBREG_REG (dest);

              validate_change (insn, &SET_DEST (body), dest, 1);

              if (GET_MODE (dest) == GET_MODE (memref))
                validate_change (insn, &SET_SRC (body), memref, 1);
              else
                {
                  /* Convert the mem ref to the destination mode.  */
                  rtx newreg = gen_reg_rtx (GET_MODE (dest));

                  start_sequence ();
                  convert_move (newreg, memref,
                                GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
                  seq = get_insns ();
                  end_sequence ();

                  validate_change (insn, &SET_SRC (body), newreg, 1);
                }
            }

          /* See if we can convert this extraction or insertion into
             a simple move insn.  We might not be able to do so if this
             was, for example, part of a PARALLEL.

             If we succeed, write out any needed conversions.  If we fail,
             it is hard to guess why we failed, so don't do anything
             special; just let the optimization be suppressed.  */

          if (apply_change_group () && seq)
            emit_insns_before (seq, insn);
        }
    }
}
1788\f
1789/* These routines are responsible for converting virtual register references
1790 to the actual hard register references once RTL generation is complete.
1791
1792 The following four variables are used for communication between the
1793 routines. They contain the offsets of the virtual registers from their
1794 respective hard registers. */
1795
1796static int in_arg_offset;
1797static int var_offset;
1798static int dynamic_offset;
1799static int out_arg_offset;
1800
1801/* In most machines, the stack pointer register is equivalent to the bottom
1802 of the stack. */
1803
1804#ifndef STACK_POINTER_OFFSET
1805#define STACK_POINTER_OFFSET 0
1806#endif
1807
1808/* If not defined, pick an appropriate default for the offset of dynamically
1809 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1810 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1811
1812#ifndef STACK_DYNAMIC_OFFSET
1813
1814#ifdef ACCUMULATE_OUTGOING_ARGS
1815/* The bottom of the stack points to the actual arguments. If
1816 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1817 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
 1818 stack space for register parameters is not pushed by the caller, but
 1819 is rather part of the fixed stack area and hence not included in
 1820 `current_function_outgoing_args_size'. Nevertheless, we must allow
 1821 for it when allocating dynamic stack objects. */
1822
1823#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1824#define STACK_DYNAMIC_OFFSET(FNDECL) \
1825(current_function_outgoing_args_size \
1826 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1827
1828#else
1829#define STACK_DYNAMIC_OFFSET(FNDECL) \
1830(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1831#endif
1832
1833#else
1834#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1835#endif
1836#endif
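/* Illustrative case (assumed values): under ACCUMULATE_OUTGOING_ARGS,
   with REG_PARM_STACK_SPACE of 16, a STACK_POINTER_OFFSET of 0, and
   24 bytes of outgoing arguments, the default above places dynamic
   allocations 24 + 16 = 40 bytes from the stack pointer, past both
   areas that later calls may overwrite. */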
1837
1838/* Pass through the INSNS of function FNDECL and convert virtual register
1839 references to hard register references. */
1840
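/* For instance (assuming a STARTING_FRAME_OFFSET of -4, so var_offset
   is -4), an address generated during expansion as
     (plus:SI (reg:SI virtual-stack-vars) (const_int 8))
   is rewritten by this pass as
     (plus:SI (reg:SI frame-pointer) (const_int 4)),
   the virtual register's offset having been folded into the
   displacement. */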
1841void
1842instantiate_virtual_regs (fndecl, insns)
1843 tree fndecl;
1844 rtx insns;
1845{
1846 rtx insn;
1847
1848 /* Compute the offsets to use for this function. */
1849 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1850 var_offset = STARTING_FRAME_OFFSET;
1851 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1852 out_arg_offset = STACK_POINTER_OFFSET;
1853
1854 /* Scan all variables and parameters of this function. For each that is
1855 in memory, instantiate all virtual registers if the result is a valid
1856 address. If not, we do it later. That will handle most uses of virtual
1857 regs on many machines. */
1858 instantiate_decls (fndecl, 1);
1859
1860 /* Initialize recognition, indicating that volatile is OK. */
1861 init_recog ();
1862
1863 /* Scan through all the insns, instantiating every virtual register still
1864 present. */
1865 for (insn = insns; insn; insn = NEXT_INSN (insn))
1866 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1867 || GET_CODE (insn) == CALL_INSN)
1868 {
1869 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
 1870 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1871 }
1872
1873 /* Now instantiate the remaining register equivalences for debugging info.
1874 These will not be valid addresses. */
1875 instantiate_decls (fndecl, 0);
1876
1877 /* Indicate that, from now on, assign_stack_local should use
1878 frame_pointer_rtx. */
1879 virtuals_instantiated = 1;
1880}
1881
1882/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1883 all virtual registers in their DECL_RTL's.
1884
1885 If VALID_ONLY, do this only if the resulting address is still valid.
1886 Otherwise, always do it. */
1887
1888static void
1889instantiate_decls (fndecl, valid_only)
1890 tree fndecl;
1891 int valid_only;
1892{
1893 tree decl;
1894
 1895 if (DECL_INLINE (fndecl))
1896 /* When compiling an inline function, the obstack used for
1897 rtl allocation is the maybepermanent_obstack. Calling
1898 `resume_temporary_allocation' switches us back to that
1899 obstack while we process this function's parameters. */
1900 resume_temporary_allocation ();
1901
1902 /* Process all parameters of the function. */
1903 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1904 {
1905 if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM
1906 && (! valid_only
1907 || ! mode_dependent_address_p (XEXP (DECL_RTL (decl), 0))))
 1908 instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
1909 (valid_only ? DECL_RTL (decl) : NULL_RTX),
1910 0);
 1911 if (DECL_INCOMING_RTL (decl)
1912 && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM
1913 && (! valid_only
1914 || ! mode_dependent_address_p (XEXP (DECL_INCOMING_RTL (decl), 0))))
 1915 instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
1916 (valid_only ? DECL_INCOMING_RTL (decl)
1917 : NULL_RTX),
1918 0);
1919 }
1920
1921 /* Now process all variables defined in the function or its subblocks. */
1922 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1923
 1924 if (DECL_INLINE (fndecl))
1925 {
1926 /* Save all rtl allocated for this function by raising the
1927 high-water mark on the maybepermanent_obstack. */
1928 preserve_data ();
1929 /* All further rtl allocation is now done in the current_obstack. */
1930 rtl_in_current_obstack ();
1931 }
1932}
1933
1934/* Subroutine of instantiate_decls: Process all decls in the given
1935 BLOCK node and all its subblocks. */
1936
1937static void
1938instantiate_decls_1 (let, valid_only)
1939 tree let;
1940 int valid_only;
1941{
1942 tree t;
1943
1944 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1945 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM
1946 && (! valid_only
1947 || ! mode_dependent_address_p (XEXP (DECL_RTL (t), 0))))
 1948 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
 1949 valid_only ? DECL_RTL (t) : NULL_RTX, 0);
1950
1951 /* Process all subblocks. */
1952 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1953 instantiate_decls_1 (t, valid_only);
1954}
1955\f
1956/* Given a pointer to a piece of rtx and an optional pointer to the
1957 containing object, instantiate any virtual registers present in it.
1958
1959 If EXTRA_INSNS, we always do the replacement and generate
 1960 any extra insns before OBJECT. If it is zero, we do nothing if replacement
1961 is not valid.
1962
1963 Return 1 if we either had nothing to do or if we were able to do the
1964 needed replacement. Return 0 otherwise; we only return zero if
1965 EXTRA_INSNS is zero.
1966
1967 We first try some simple transformations to avoid the creation of extra
1968 pseudos. */
1969
1970static int
1971instantiate_virtual_regs_1 (loc, object, extra_insns)
1972 rtx *loc;
1973 rtx object;
1974 int extra_insns;
1975{
1976 rtx x;
1977 RTX_CODE code;
1978 rtx new = 0;
1979 int offset;
1980 rtx temp;
1981 rtx seq;
1982 int i, j;
1983 char *fmt;
1984
1985 /* Re-start here to avoid recursion in common cases. */
1986 restart:
1987
1988 x = *loc;
1989 if (x == 0)
1990 return 1;
1991
1992 code = GET_CODE (x);
1993
1994 /* Check for some special cases. */
1995 switch (code)
1996 {
1997 case CONST_INT:
1998 case CONST_DOUBLE:
1999 case CONST:
2000 case SYMBOL_REF:
2001 case CODE_LABEL:
2002 case PC:
2003 case CC0:
2004 case ASM_INPUT:
2005 case ADDR_VEC:
2006 case ADDR_DIFF_VEC:
2007 case RETURN:
2008 return 1;
2009
2010 case SET:
 2011 /* We are allowed to set the virtual registers. This means
 2012 that the actual register should receive the source minus the
2013 appropriate offset. This is used, for example, in the handling
2014 of non-local gotos. */
2015 if (SET_DEST (x) == virtual_incoming_args_rtx)
2016 new = arg_pointer_rtx, offset = - in_arg_offset;
2017 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2018 new = frame_pointer_rtx, offset = - var_offset;
2019 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2020 new = stack_pointer_rtx, offset = - dynamic_offset;
2021 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2022 new = stack_pointer_rtx, offset = - out_arg_offset;
2023
2024 if (new)
2025 {
2026 /* The only valid sources here are PLUS or REG. Just do
2027 the simplest possible thing to handle them. */
2028 if (GET_CODE (SET_SRC (x)) != REG
2029 && GET_CODE (SET_SRC (x)) != PLUS)
2030 abort ();
2031
2032 start_sequence ();
2033 if (GET_CODE (SET_SRC (x)) != REG)
 2034 temp = force_operand (SET_SRC (x), NULL_RTX);
2035 else
2036 temp = SET_SRC (x);
 2037 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2038 seq = get_insns ();
2039 end_sequence ();
2040
2041 emit_insns_before (seq, object);
2042 SET_DEST (x) = new;
2043
2044 if (!validate_change (object, &SET_SRC (x), temp, 0)
2045 || ! extra_insns)
2046 abort ();
2047
2048 return 1;
2049 }
2050
2051 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2052 loc = &SET_SRC (x);
2053 goto restart;
2054
2055 case PLUS:
2056 /* Handle special case of virtual register plus constant. */
2057 if (CONSTANT_P (XEXP (x, 1)))
2058 {
2059 rtx old;
2060
2061 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2062 if (GET_CODE (XEXP (x, 0)) == PLUS)
2063 {
2064 rtx inner = XEXP (XEXP (x, 0), 0);
2065
2066 if (inner == virtual_incoming_args_rtx)
2067 new = arg_pointer_rtx, offset = in_arg_offset;
2068 else if (inner == virtual_stack_vars_rtx)
2069 new = frame_pointer_rtx, offset = var_offset;
2070 else if (inner == virtual_stack_dynamic_rtx)
2071 new = stack_pointer_rtx, offset = dynamic_offset;
2072 else if (inner == virtual_outgoing_args_rtx)
2073 new = stack_pointer_rtx, offset = out_arg_offset;
2074 else
2075 {
2076 loc = &XEXP (x, 0);
2077 goto restart;
2078 }
2079
2080 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2081 extra_insns);
2082 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2083 }
2084
2085 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2086 new = arg_pointer_rtx, offset = in_arg_offset;
2087 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2088 new = frame_pointer_rtx, offset = var_offset;
2089 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2090 new = stack_pointer_rtx, offset = dynamic_offset;
2091 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2092 new = stack_pointer_rtx, offset = out_arg_offset;
2093 else
2094 {
2095 /* We know the second operand is a constant. Unless the
 2096 first operand is a REG (which has already been checked),
2097 it needs to be checked. */
2098 if (GET_CODE (XEXP (x, 0)) != REG)
2099 {
2100 loc = &XEXP (x, 0);
2101 goto restart;
2102 }
2103 return 1;
2104 }
2105
2106 old = XEXP (x, 0);
2107 XEXP (x, 0) = new;
2108 new = plus_constant (XEXP (x, 1), offset);
2109
2110 /* If the new constant is zero, try to replace the sum with its
2111 first operand. */
2112 if (new == const0_rtx
2113 && validate_change (object, loc, XEXP (x, 0), 0))
2114 return 1;
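 /* E.g., with a var_offset of -8 (an assumed value),
    (plus virtual-stack-vars (const_int 8)) folds to a zero
    displacement, and the bare frame pointer replaces the
    whole sum. */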
2115
 2116 /* Next try to replace the constant with the new one. */
2117 if (!validate_change (object, &XEXP (x, 1), new, 0))
2118 {
2119 if (! extra_insns)
2120 {
2121 XEXP (x, 0) = old;
2122 return 0;
2123 }
2124
2125 /* Otherwise copy the new constant into a register and replace
2126 constant with that register. */
2127 temp = gen_reg_rtx (Pmode);
2128 if (validate_change (object, &XEXP (x, 1), temp, 0))
2129 emit_insn_before (gen_move_insn (temp, new), object);
2130 else
2131 {
2132 /* If that didn't work, replace this expression with a
2133 register containing the sum. */
2134
2135 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2136 XEXP (x, 0) = old;
2137
2138 start_sequence ();
5f4f0e22 2139 temp = force_operand (new, NULL_RTX);
6f086dfc
RS
2140 seq = get_insns ();
2141 end_sequence ();
2142
2143 emit_insns_before (seq, object);
2144 if (! validate_change (object, loc, temp, 0)
2145 && ! validate_replace_rtx (x, temp, object))
2146 abort ();
2147 }
2148 }
2149
2150 return 1;
2151 }
2152
2153 /* Fall through to generic two-operand expression case. */
2154 case EXPR_LIST:
2155 case CALL:
2156 case COMPARE:
2157 case MINUS:
2158 case MULT:
2159 case DIV: case UDIV:
2160 case MOD: case UMOD:
2161 case AND: case IOR: case XOR:
2162 case LSHIFT: case ASHIFT: case ROTATE:
2163 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2164 case NE: case EQ:
2165 case GE: case GT: case GEU: case GTU:
2166 case LE: case LT: case LEU: case LTU:
2167 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2168 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2169 loc = &XEXP (x, 0);
2170 goto restart;
2171
2172 case MEM:
2173 /* Most cases of MEM that convert to valid addresses have already been
2174 handled by our scan of regno_reg_rtx. The only special handling we
2175 need here is to make a copy of the rtx to ensure it isn't being
 2176 shared if we have to change it to a pseudo.
2177
2178 If the rtx is a simple reference to an address via a virtual register,
2179 it can potentially be shared. In such cases, first try to make it
2180 a valid address, which can also be shared. Otherwise, copy it and
2181 proceed normally.
2182
2183 First check for common cases that need no processing. These are
2184 usually due to instantiation already being done on a previous instance
2185 of a shared rtx. */
2186
2187 temp = XEXP (x, 0);
2188 if (CONSTANT_ADDRESS_P (temp)
2189#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2190 || temp == arg_pointer_rtx
2191#endif
2192 || temp == frame_pointer_rtx)
2193 return 1;
2194
2195 if (GET_CODE (temp) == PLUS
2196 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2197 && (XEXP (temp, 0) == frame_pointer_rtx
2198#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2199 || XEXP (temp, 0) == arg_pointer_rtx
2200#endif
2201 ))
2202 return 1;
2203
2204 if (temp == virtual_stack_vars_rtx
2205 || temp == virtual_incoming_args_rtx
2206 || (GET_CODE (temp) == PLUS
2207 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2208 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2209 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2210 {
2211 /* This MEM may be shared. If the substitution can be done without
2212 the need to generate new pseudos, we want to do it in place
2213 so all copies of the shared rtx benefit. The call below will
2214 only make substitutions if the resulting address is still
2215 valid.
2216
2217 Note that we cannot pass X as the object in the recursive call
2218 since the insn being processed may not allow all valid
 2219 addresses. However, if we were not passed an object, we can
 2220 only modify X without copying it if X will have a valid
 2221 address.
 2222
 2223 ??? Also note that this can still lose if OBJECT is an insn that
 2224 has fewer restrictions on an address than some other insn.
2225 In that case, we will modify the shared address. This case
2226 doesn't seem very likely, though. */
2227
2228 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2229 object ? object : x, 0))
2230 return 1;
2231
2232 /* Otherwise make a copy and process that copy. We copy the entire
2233 RTL expression since it might be a PLUS which could also be
2234 shared. */
2235 *loc = x = copy_rtx (x);
2236 }
2237
2238 /* Fall through to generic unary operation case. */
2239 case USE:
2240 case CLOBBER:
2241 case SUBREG:
2242 case STRICT_LOW_PART:
2243 case NEG: case NOT:
2244 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2245 case SIGN_EXTEND: case ZERO_EXTEND:
2246 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2247 case FLOAT: case FIX:
2248 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2249 case ABS:
2250 case SQRT:
2251 case FFS:
 2252 /* These cases either have just one operand or we know that we need not
2253 check the rest of the operands. */
2254 loc = &XEXP (x, 0);
2255 goto restart;
2256
2257 case REG:
2258 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2259 in front of this insn and substitute the temporary. */
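 /* E.g. (assuming a FIRST_PARM_OFFSET of 8, so in_arg_offset is 8),
    a bare reference to virtual-incoming-args becomes
    (plus arg-pointer (const_int 8)); if the insn won't accept that
    sum, it is computed into a fresh pseudo just before the insn. */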
2260 if (x == virtual_incoming_args_rtx)
2261 new = arg_pointer_rtx, offset = in_arg_offset;
2262 else if (x == virtual_stack_vars_rtx)
2263 new = frame_pointer_rtx, offset = var_offset;
2264 else if (x == virtual_stack_dynamic_rtx)
2265 new = stack_pointer_rtx, offset = dynamic_offset;
2266 else if (x == virtual_outgoing_args_rtx)
2267 new = stack_pointer_rtx, offset = out_arg_offset;
2268
2269 if (new)
2270 {
2271 temp = plus_constant (new, offset);
2272 if (!validate_change (object, loc, temp, 0))
2273 {
2274 if (! extra_insns)
2275 return 0;
2276
2277 start_sequence ();
 2278 temp = force_operand (temp, NULL_RTX);
2279 seq = get_insns ();
2280 end_sequence ();
2281
2282 emit_insns_before (seq, object);
2283 if (! validate_change (object, loc, temp, 0)
2284 && ! validate_replace_rtx (x, temp, object))
2285 abort ();
2286 }
2287 }
2288
2289 return 1;
2290 }
2291
2292 /* Scan all subexpressions. */
2293 fmt = GET_RTX_FORMAT (code);
2294 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2295 if (*fmt == 'e')
2296 {
2297 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2298 return 0;
2299 }
2300 else if (*fmt == 'E')
2301 for (j = 0; j < XVECLEN (x, i); j++)
2302 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2303 extra_insns))
2304 return 0;
2305
2306 return 1;
2307}
2308\f
2309/* Optimization: assuming this function does not receive nonlocal gotos,
2310 delete the handlers for such, as well as the insns to establish
2311 and disestablish them. */
2312
2313static void
2314delete_handlers ()
2315{
2316 rtx insn;
2317 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2318 {
2319 /* Delete the handler by turning off the flag that would
2320 prevent jump_optimize from deleting it.
2321 Also permit deletion of the nonlocal labels themselves
2322 if nothing local refers to them. */
2323 if (GET_CODE (insn) == CODE_LABEL)
2324 LABEL_PRESERVE_P (insn) = 0;
2325 if (GET_CODE (insn) == INSN
2326 && ((nonlocal_goto_handler_slot != 0
2327 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2328 || (nonlocal_goto_stack_level != 0
2329 && reg_mentioned_p (nonlocal_goto_stack_level,
2330 PATTERN (insn)))))
2331 delete_insn (insn);
2332 }
2333}
2334
2335/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2336 of the current function. */
2337
2338rtx
2339nonlocal_label_rtx_list ()
2340{
2341 tree t;
2342 rtx x = 0;
2343
2344 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2345 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2346
2347 return x;
2348}
2349\f
2350/* Output a USE for any register use in RTL.
 2351 This is used with -noreg to mark the extent of the lifespan
2352 of any registers used in a user-visible variable's DECL_RTL. */
2353
2354void
2355use_variable (rtl)
2356 rtx rtl;
2357{
2358 if (GET_CODE (rtl) == REG)
2359 /* This is a register variable. */
2360 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2361 else if (GET_CODE (rtl) == MEM
2362 && GET_CODE (XEXP (rtl, 0)) == REG
2363 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2364 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2365 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2366 /* This is a variable-sized structure. */
2367 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2368}
2369
2370/* Like use_variable except that it outputs the USEs after INSN
2371 instead of at the end of the insn-chain. */
2372
2373void
2374use_variable_after (rtl, insn)
2375 rtx rtl, insn;
2376{
2377 if (GET_CODE (rtl) == REG)
2378 /* This is a register variable. */
2379 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2380 else if (GET_CODE (rtl) == MEM
2381 && GET_CODE (XEXP (rtl, 0)) == REG
2382 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2383 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2384 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2385 /* This is a variable-sized structure. */
2386 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2387}
2388\f
2389int
2390max_parm_reg_num ()
2391{
2392 return max_parm_reg;
2393}
2394
2395/* Return the first insn following those generated by `assign_parms'. */
2396
2397rtx
2398get_first_nonparm_insn ()
2399{
2400 if (last_parm_insn)
2401 return NEXT_INSN (last_parm_insn);
2402 return get_insns ();
2403}
2404
2405/* Return 1 if EXP returns an aggregate value, for which an address
2406 must be passed to the function or returned by the function. */
2407
2408int
2409aggregate_value_p (exp)
2410 tree exp;
2411{
2412 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2413 return 1;
2414 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2415 return 1;
2416 if (flag_pcc_struct_return
2417 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2418 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2419 return 1;
2420 return 0;
2421}
2422\f
2423/* Assign RTL expressions to the function's parameters.
2424 This may involve copying them into registers and using
2425 those registers as the RTL for them.
2426
2427 If SECOND_TIME is non-zero it means that this function is being
2428 called a second time. This is done by integrate.c when a function's
2429 compilation is deferred. We need to come back here in case the
2430 FUNCTION_ARG macro computes items needed for the rest of the compilation
2431 (such as changing which registers are fixed or caller-saved). But suppress
2432 writing any insns or setting DECL_RTL of anything in this case. */
2433
2434void
2435assign_parms (fndecl, second_time)
2436 tree fndecl;
2437 int second_time;
2438{
2439 register tree parm;
2440 register rtx entry_parm = 0;
2441 register rtx stack_parm = 0;
2442 CUMULATIVE_ARGS args_so_far;
2443 enum machine_mode passed_mode, nominal_mode;
2444 /* Total space needed so far for args on the stack,
2445 given as a constant and a tree-expression. */
2446 struct args_size stack_args_size;
2447 tree fntype = TREE_TYPE (fndecl);
2448 tree fnargs = DECL_ARGUMENTS (fndecl);
2449 /* This is used for the arg pointer when referring to stack args. */
2450 rtx internal_arg_pointer;
2451 /* This is a dummy PARM_DECL that we used for the function result if
2452 the function returns a structure. */
2453 tree function_result_decl = 0;
2454 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2455 int varargs_setup = 0;
2456
2457 /* Nonzero if the last arg is named `__builtin_va_alist',
2458 which is used on some machines for old-fashioned non-ANSI varargs.h;
2459 this should be stuck onto the stack as if it had arrived there. */
2460 int vararg
2461 = (fnargs
2462 && (parm = tree_last (fnargs)) != 0
2463 && DECL_NAME (parm)
2464 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2465 "__builtin_va_alist")));
2466
2467 /* Nonzero if function takes extra anonymous args.
2468 This means the last named arg must be on the stack
2469 right before the anonymous ones. */
2470 int stdarg
2471 = (TYPE_ARG_TYPES (fntype) != 0
2472 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2473 != void_type_node));
2474
2475 /* If the reg that the virtual arg pointer will be translated into is
2476 not a fixed reg or is the stack pointer, make a copy of the virtual
2477 arg pointer, and address parms via the copy. The frame pointer is
2478 considered fixed even though it is not marked as such.
2479
2480 The second time through, simply use ap to avoid generating rtx. */
2481
2482 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2483 || ! (fixed_regs[ARG_POINTER_REGNUM]
2484 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2485 && ! second_time)
2486 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2487 else
2488 internal_arg_pointer = virtual_incoming_args_rtx;
2489 current_function_internal_arg_pointer = internal_arg_pointer;
2490
2491 stack_args_size.constant = 0;
2492 stack_args_size.var = 0;
2493
2494 /* If struct value address is treated as the first argument, make it so. */
2495 if (aggregate_value_p (DECL_RESULT (fndecl))
2496 && ! current_function_returns_pcc_struct
2497 && struct_value_incoming_rtx == 0)
2498 {
2499 tree type = build_pointer_type (fntype);
2500
 2501 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2502
2503 DECL_ARG_TYPE (function_result_decl) = type;
2504 TREE_CHAIN (function_result_decl) = fnargs;
2505 fnargs = function_result_decl;
2506 }
2507
2508 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2509 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2510
2511#ifdef INIT_CUMULATIVE_INCOMING_ARGS
 2512 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
 2513#else
 2514 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2515#endif
2516
2517 /* We haven't yet found an argument that we must push and pretend the
2518 caller did. */
2519 current_function_pretend_args_size = 0;
2520
2521 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2522 {
2523 int aggregate
2524 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2525 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2526 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2527 struct args_size stack_offset;
2528 struct args_size arg_size;
2529 int passed_pointer = 0;
2530 tree passed_type = DECL_ARG_TYPE (parm);
2531
 2532 /* Set LAST_NAMED if this is the last named arg before some
2533 anonymous args. We treat it as if it were anonymous too. */
2534 int last_named = ((TREE_CHAIN (parm) == 0
2535 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2536 && (vararg || stdarg));
2537
2538 if (TREE_TYPE (parm) == error_mark_node
2539 /* This can happen after weird syntax errors
2540 or if an enum type is defined among the parms. */
2541 || TREE_CODE (parm) != PARM_DECL
2542 || passed_type == NULL)
2543 {
2544 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2545 TREE_USED (parm) = 1;
2546 continue;
2547 }
2548
 2549 /* For a varargs.h function, save info about regs and stack space
2550 used by the individual args, not including the va_alist arg. */
2551 if (vararg && last_named)
2552 current_function_args_info = args_so_far;
2553
2554 /* Find mode of arg as it is passed, and mode of arg
2555 as it should be during execution of this function. */
2556 passed_mode = TYPE_MODE (passed_type);
2557 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2558
2559 /* If the parm's mode is VOID, its value doesn't matter,
2560 and avoid the usual things like emit_move_insn that could crash. */
2561 if (nominal_mode == VOIDmode)
2562 {
2563 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2564 continue;
2565 }
2566
2567#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2568 /* See if this arg was passed by invisible reference. */
2569 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2570 passed_type, ! last_named))
2571 {
2572 passed_type = build_pointer_type (passed_type);
2573 passed_pointer = 1;
2574 passed_mode = nominal_mode = Pmode;
2575 }
2576#endif
2577
2578 /* Let machine desc say which reg (if any) the parm arrives in.
2579 0 means it arrives on the stack. */
2580#ifdef FUNCTION_INCOMING_ARG
2581 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2582 passed_type, ! last_named);
2583#else
2584 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2585 passed_type, ! last_named);
2586#endif
2587
2588#ifdef SETUP_INCOMING_VARARGS
2589 /* If this is the last named parameter, do any required setup for
2590 varargs or stdargs. We need to know about the case of this being an
2591 addressable type, in which case we skip the registers it
2592 would have arrived in.
2593
2594 For stdargs, LAST_NAMED will be set for two parameters, the one that
2595 is actually the last named, and the dummy parameter. We only
2596 want to do this action once.
2597
2598 Also, indicate when RTL generation is to be suppressed. */
2599 if (last_named && !varargs_setup)
2600 {
2601 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2602 current_function_pretend_args_size,
2603 second_time);
2604 varargs_setup = 1;
2605 }
2606#endif
2607
2608 /* Determine parm's home in the stack,
2609 in case it arrives in the stack or we should pretend it did.
2610
2611 Compute the stack position and rtx where the argument arrives
2612 and its size.
2613
2614 There is one complexity here: If this was a parameter that would
 2615 have been passed in registers, but wasn't, only because it is
2616 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2617 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2618 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2619 0 as it was the previous time. */
2620
2621 locate_and_pad_parm (passed_mode, passed_type,
2622#ifdef STACK_PARMS_IN_REG_PARM_AREA
2623 1,
2624#else
2625#ifdef FUNCTION_INCOMING_ARG
2626 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2627 passed_type,
2628 (! last_named
2629 || varargs_setup)) != 0,
2630#else
2631 FUNCTION_ARG (args_so_far, passed_mode,
2632 passed_type,
2633 ! last_named || varargs_setup) != 0,
2634#endif
2635#endif
2636 fndecl, &stack_args_size, &stack_offset, &arg_size);
2637
2638 if (! second_time)
2639 {
2640 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2641
2642 if (offset_rtx == const0_rtx)
2643 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2644 else
2645 stack_parm = gen_rtx (MEM, passed_mode,
2646 gen_rtx (PLUS, Pmode,
2647 internal_arg_pointer, offset_rtx));
2648
2649 /* If this is a memory ref that contains aggregate components,
2650 mark it as such for cse and loop optimize. */
2651 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2652 }
2653
2654 /* If this parameter was passed both in registers and in the stack,
2655 use the copy on the stack. */
2656 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2657 entry_parm = 0;
2658
2659 /* If this parm was passed part in regs and part in memory,
2660 pretend it arrived entirely in memory
2661 by pushing the register-part onto the stack.
2662
2663 In the special case of a DImode or DFmode that is split,
2664 we could put it together in a pseudoreg directly,
2665 but for now that's not worth bothering with. */
2666
2667 if (entry_parm)
2668 {
2669 int nregs = 0;
2670#ifdef FUNCTION_ARG_PARTIAL_NREGS
2671 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2672 passed_type, ! last_named);
2673#endif
2674
2675 if (nregs > 0)
2676 {
2677 current_function_pretend_args_size
2678 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2679 / (PARM_BOUNDARY / BITS_PER_UNIT)
2680 * (PARM_BOUNDARY / BITS_PER_UNIT));
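 /* This rounds the register-passed part up to a whole number
    of PARM_BOUNDARY units; e.g. (assumed values), one 4-byte
    register with PARM_BOUNDARY of 64 gives
    (4 + 8 - 1) / 8 * 8 = 8 bytes of pretended arguments. */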
2681
2682 if (! second_time)
2683 move_block_from_reg (REGNO (entry_parm),
2684 validize_mem (stack_parm), nregs);
2685 entry_parm = stack_parm;
2686 }
2687 }
2688
2689 /* If we didn't decide this parm came in a register,
2690 by default it came on the stack. */
2691 if (entry_parm == 0)
2692 entry_parm = stack_parm;
2693
2694 /* Record permanently how this parm was passed. */
2695 if (! second_time)
2696 DECL_INCOMING_RTL (parm) = entry_parm;
2697
2698 /* If there is actually space on the stack for this parm,
2699 count it in stack_args_size; otherwise set stack_parm to 0
2700 to indicate there is no preallocated stack slot for the parm. */
2701
2702 if (entry_parm == stack_parm
 2703#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
 2704 /* On some machines, even if a parm value arrives in a register
 2705 there is still an (uninitialized) stack slot allocated for it.
 2706
 2707 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
 2708 whether this parameter already has a stack slot allocated,
 2709 because an arg block exists only if current_function_args_size
 2710 is larger than some threshold, and we haven't calculated that
 2711 yet. So, for now, we just assume that stack slots never exist
 2712 in this case. */
2713 || REG_PARM_STACK_SPACE (fndecl) > 0
2714#endif
2715 )
2716 {
2717 stack_args_size.constant += arg_size.constant;
2718 if (arg_size.var)
2719 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2720 }
2721 else
2722 /* No stack slot was pushed for this parm. */
2723 stack_parm = 0;
2724
2725 /* Update info on where next arg arrives in registers. */
2726
2727 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2728 passed_type, ! last_named);
2729
2730 /* If this is our second time through, we are done with this parm. */
2731 if (second_time)
2732 continue;
2733
2734 /* If we can't trust the parm stack slot to be aligned enough
2735 for its ultimate type, don't use that slot after entry.
2736 We'll make another stack slot, if we need one. */
2737 {
2738#ifdef FUNCTION_ARG_BOUNDARY
2739 int thisparm_boundary
2740 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2741#else
2742 int thisparm_boundary = PARM_BOUNDARY;
2743#endif
2744
2745 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2746 stack_parm = 0;
2747 }
2748
2749 /* Now adjust STACK_PARM to the mode and precise location
2750 where this parameter should live during execution,
2751 if we discover that it must live in the stack during execution.
2752 To make debuggers happier on big-endian machines, we store
2753 the value in the last bytes of the space available. */
2754
2755 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2756 && stack_parm != 0)
2757 {
2758 rtx offset_rtx;
2759
2760#if BYTES_BIG_ENDIAN
2761 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2762 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2763 - GET_MODE_SIZE (nominal_mode));
2764#endif
2765
2766 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2767 if (offset_rtx == const0_rtx)
2768 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2769 else
2770 stack_parm = gen_rtx (MEM, nominal_mode,
2771 gen_rtx (PLUS, Pmode,
2772 internal_arg_pointer, offset_rtx));
2773
2774 /* If this is a memory ref that contains aggregate components,
2775 mark it as such for cse and loop optimize. */
2776 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2777 }
2778
2779 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2780 in the mode in which it arrives.
2781 STACK_PARM is an RTX for a stack slot where the parameter can live
2782 during the function (in case we want to put it there).
2783 STACK_PARM is 0 if no stack slot was pushed for it.
2784
2785 Now output code if necessary to convert ENTRY_PARM to
2786 the type in which this function declares it,
2787 and store that result in an appropriate place,
2788 which may be a pseudo reg, may be STACK_PARM,
2789 or may be a local stack slot if STACK_PARM is 0.
2790
2791 Set DECL_RTL to that place. */
2792
2793 if (nominal_mode == BLKmode)
2794 {
2795 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2796 if (GET_CODE (entry_parm) == REG)
2797 {
2798 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2799 UNITS_PER_WORD);
2800
2801 /* Note that we will be storing an integral number of words.
2802 So we have to be careful to ensure that we allocate an
2803 integral number of words. We do this below in the
2804 assign_stack_local if space was not allocated in the argument
2805 list. If it was, this will not work if PARM_BOUNDARY is not
2806 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2807 if it becomes a problem. */
2808
2809 if (stack_parm == 0)
2810 stack_parm
2811 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2812 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2813 abort ();
2814
2815 move_block_from_reg (REGNO (entry_parm),
2816 validize_mem (stack_parm),
2817 size_stored / UNITS_PER_WORD);
2818 }
2819 DECL_RTL (parm) = stack_parm;
2820 }
 2821 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
 2822 && ! DECL_INLINE (fndecl))
2823 /* layout_decl may set this. */
2824 || TREE_ADDRESSABLE (parm)
2825 || TREE_SIDE_EFFECTS (parm)
2826 /* If -ffloat-store specified, don't put explicit
2827 float variables into registers. */
2828 || (flag_float_store
2829 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2830 /* Always assign pseudo to structure return or item passed
2831 by invisible reference. */
2832 || passed_pointer || parm == function_result_decl)
2833 {
2834 /* Store the parm in a pseudoregister during the function. */
2835 register rtx parmreg = gen_reg_rtx (nominal_mode);
2836
2837 REG_USERVAR_P (parmreg) = 1;
2838
2839 /* If this was an item that we received a pointer to, set DECL_RTL
2840 appropriately. */
2841 if (passed_pointer)
2842 {
2843 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2844 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2845 }
2846 else
2847 DECL_RTL (parm) = parmreg;
2848
2849 /* Copy the value into the register. */
2850 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
2851 {
2852 /* If ENTRY_PARM is a hard register, it might be in a register
2853 not valid for operating in its mode (e.g., an odd-numbered
2854 register for a DFmode). In that case, moves are the only
2855 thing valid, so we can't do a convert from there. This
 2856 occurs when the calling sequence allows such misaligned
2857 usages. */
2858 if (GET_CODE (entry_parm) == REG
2859 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2860 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2861 GET_MODE (entry_parm)))
 2862 convert_move (parmreg, copy_to_reg (entry_parm), 0);
2863 else
2864 convert_move (parmreg, validize_mem (entry_parm), 0);
2865 }
2866 else
2867 emit_move_insn (parmreg, validize_mem (entry_parm));
2868
2869 /* If we were passed a pointer but the actual value
2870 can safely live in a register, put it in one. */
 2871 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2872 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
2873 && ! DECL_INLINE (fndecl))
2874 /* layout_decl may set this. */
2875 || TREE_ADDRESSABLE (parm)
2876 || TREE_SIDE_EFFECTS (parm)
2877 /* If -ffloat-store specified, don't put explicit
2878 float variables into registers. */
2879 || (flag_float_store
2880 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
2881 {
2882 /* We can't use nominal_mode, because it will have been set to
2883 Pmode above. We must use the actual mode of the parm. */
2884 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2885 emit_move_insn (parmreg, DECL_RTL (parm));
2886 DECL_RTL (parm) = parmreg;
2887 }
2888
2889 /* In any case, record the parm's desired stack location
2890 in case we later discover it must live in the stack. */
2891 if (REGNO (parmreg) >= nparmregs)
2892 {
2893 rtx *new;
2894 nparmregs = REGNO (parmreg) + 5;
2895 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2896 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2897 parm_reg_stack_loc = new;
2898 }
2899 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2900
2901 /* Mark the register as eliminable if we did no conversion
2902 and it was copied from memory at a fixed offset,
2903 and the arg pointer was not copied to a pseudo-reg.
2904 If the arg pointer is a pseudo reg or the offset formed
2905 an invalid address, such memory-equivalences
2906 as we make here would screw up life analysis for it. */
2907 if (nominal_mode == passed_mode
2908 && GET_CODE (entry_parm) == MEM
 2909 && entry_parm == stack_parm
2910 && stack_offset.var == 0
2911 && reg_mentioned_p (virtual_incoming_args_rtx,
2912 XEXP (entry_parm, 0)))
2913 REG_NOTES (get_last_insn ())
2914 = gen_rtx (EXPR_LIST, REG_EQUIV,
2915 entry_parm, REG_NOTES (get_last_insn ()));
2916
2917 /* For pointer data type, suggest pointer register. */
2918 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2919 mark_reg_pointer (parmreg);
2920 }
2921 else
2922 {
2923 /* Value must be stored in the stack slot STACK_PARM
2924 during function execution. */
2925
2926 if (passed_mode != nominal_mode)
2927 {
2928 /* Conversion is required. */
2929 if (GET_CODE (entry_parm) == REG
2930 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2931 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2932 entry_parm = copy_to_reg (entry_parm);
2933
2934 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2935 }
2936
2937 if (entry_parm != stack_parm)
2938 {
2939 if (stack_parm == 0)
2940 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2941 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2942 emit_move_insn (validize_mem (stack_parm),
2943 validize_mem (entry_parm));
2944 }
2945
2946 DECL_RTL (parm) = stack_parm;
2947 }
2948
2949 /* If this "parameter" was the place where we are receiving the
2950 function's incoming structure pointer, set up the result. */
2951 if (parm == function_result_decl)
2952 DECL_RTL (DECL_RESULT (fndecl))
2953 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
2954
2955 if (TREE_THIS_VOLATILE (parm))
2956 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
2957 if (TREE_READONLY (parm))
2958 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
2959 }
2960
2961 max_parm_reg = max_reg_num ();
2962 last_parm_insn = get_last_insn ();
2963
2964 current_function_args_size = stack_args_size.constant;
2965
2966 /* Adjust function incoming argument size for alignment and
2967 minimum length. */
2968
2969#ifdef REG_PARM_STACK_SPACE
 2970#ifndef MAYBE_REG_PARM_STACK_SPACE
2971 current_function_args_size = MAX (current_function_args_size,
2972 REG_PARM_STACK_SPACE (fndecl));
2973#endif
 2974#endif
2975
2976#ifdef STACK_BOUNDARY
2977#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2978
2979 current_function_args_size
2980 = ((current_function_args_size + STACK_BYTES - 1)
2981 / STACK_BYTES) * STACK_BYTES;
2982#endif
2983
2984#ifdef ARGS_GROW_DOWNWARD
2985 current_function_arg_offset_rtx
 2986 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
2987 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
2988 size_int (-stack_args_size.constant)),
 2989 NULL_RTX, VOIDmode, 0));
2990#else
2991 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2992#endif
2993
2994 /* See how many bytes, if any, of its args a function should try to pop
2995 on return. */
2996
2997 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
2998 current_function_args_size);
2999
 3000 /* For a stdarg.h function, save info about regs and stack space
3001 used by the named args. */
3002
3003 if (stdarg)
3004 current_function_args_info = args_so_far;
3005
3006 /* Set the rtx used for the function return value. Put this in its
3007 own variable so any optimizers that need this information don't have
3008 to include tree.h. Do this here so it gets done when an inlined
3009 function gets output. */
3010
3011 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3012}
3013\f
3014/* Compute the size and offset from the start of the stacked arguments for a
3015 parm passed in mode PASSED_MODE and with type TYPE.
3016
3017 INITIAL_OFFSET_PTR points to the current offset into the stacked
3018 arguments.
3019
3020 The starting offset and size for this parm are returned in *OFFSET_PTR
3021 and *ARG_SIZE_PTR, respectively.
3022
3023 IN_REGS is non-zero if the argument will be passed in registers. It will
3024 never be set if REG_PARM_STACK_SPACE is not defined.
3025
3026 FNDECL is the function in which the argument was defined.
3027
3028 There are two types of rounding that are done. The first, controlled by
3029 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3030 list to be aligned to the specific boundary (in bits). This rounding
3031 affects the initial and starting offsets, but not the argument size.
3032
3033 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3034 optionally rounds the size of the parm to PARM_BOUNDARY. The
3035 initial offset is not affected by this rounding, while the size always
3036 is and the starting offset may be. */
3037
3038/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
3039 initial_offset_ptr is positive because locate_and_pad_parm's
3040 callers pass in the total size of args so far as
 3041 initial_offset_ptr. arg_size_ptr is always positive. */
3042
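/* Worked example (assumed values): with args growing upward, a
   FUNCTION_ARG_BOUNDARY of 32 bits, and 6 bytes of arguments so far,
   a 2-byte parm is first aligned (the offset 6 rounds up to 8), so it
   starts at offset 8; rounding its size to PARM_BOUNDARY (also assumed
   32 bits) makes arg_size 4, and the next parm starts at offset 12. */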
3043static void pad_to_arg_alignment (), pad_below ();
3044
3045void
3046locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3047 initial_offset_ptr, offset_ptr, arg_size_ptr)
3048 enum machine_mode passed_mode;
3049 tree type;
3050 int in_regs;
3051 tree fndecl;
3052 struct args_size *initial_offset_ptr;
3053 struct args_size *offset_ptr;
3054 struct args_size *arg_size_ptr;
3055{
3056 tree sizetree
3057 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3058 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3059 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3060 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3061 int reg_parm_stack_space = 0;
3062
3063#ifdef REG_PARM_STACK_SPACE
3064 /* If we have found a stack parm before we reach the end of the
3065 area reserved for registers, skip that area. */
3066 if (! in_regs)
3067 {
3068#ifdef MAYBE_REG_PARM_STACK_SPACE
3069 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3070#else
 3071 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
 3072#endif
3073 if (reg_parm_stack_space > 0)
3074 {
3075 if (initial_offset_ptr->var)
3076 {
3077 initial_offset_ptr->var
3078 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3079 size_int (reg_parm_stack_space));
3080 initial_offset_ptr->constant = 0;
3081 }
3082 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3083 initial_offset_ptr->constant = reg_parm_stack_space;
3084 }
3085 }
3086#endif /* REG_PARM_STACK_SPACE */
3087
3088 arg_size_ptr->var = 0;
3089 arg_size_ptr->constant = 0;
3090
3091#ifdef ARGS_GROW_DOWNWARD
3092 if (initial_offset_ptr->var)
3093 {
3094 offset_ptr->constant = 0;
3095 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3096 initial_offset_ptr->var);
3097 }
3098 else
3099 {
3100 offset_ptr->constant = - initial_offset_ptr->constant;
3101 offset_ptr->var = 0;
3102 }
3103 if (where_pad == upward
3104 && (TREE_CODE (sizetree) != INTEGER_CST
3105 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3106 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3107 SUB_PARM_SIZE (*offset_ptr, sizetree);
3108 pad_to_arg_alignment (offset_ptr, boundary);
3109 if (initial_offset_ptr->var)
3110 {
3111 arg_size_ptr->var = size_binop (MINUS_EXPR,
3112 size_binop (MINUS_EXPR,
3113 integer_zero_node,
3114 initial_offset_ptr->var),
3115 offset_ptr->var);
3116 }
3117 else
3118 {
3119 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3120 offset_ptr->constant);
3121 }
3122/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3123 if (where_pad == downward)
3124 pad_below (arg_size_ptr, passed_mode, sizetree);
3125#else /* !ARGS_GROW_DOWNWARD */
3126 pad_to_arg_alignment (initial_offset_ptr, boundary);
3127 *offset_ptr = *initial_offset_ptr;
3128 if (where_pad == downward)
3129 pad_below (offset_ptr, passed_mode, sizetree);
3130
3131#ifdef PUSH_ROUNDING
3132 if (passed_mode != BLKmode)
3133 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3134#endif
3135
3136 if (where_pad != none
3137 && (TREE_CODE (sizetree) != INTEGER_CST
3138 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3139 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3140
3141 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3142#endif /* ARGS_GROW_DOWNWARD */
3143}
3144
3145/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3146 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
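/* E.g. (hypothetical call): a constant offset of 9 with a BOUNDARY of
   32 bits (4 bytes) becomes CEIL_ROUND (9, 4) = 12 when args grow
   upward, or FLOOR_ROUND (9, 4) = 8 when they grow downward. */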
3147
3148static void
3149pad_to_arg_alignment (offset_ptr, boundary)
3150 struct args_size *offset_ptr;
3151 int boundary;
3152{
3153 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3154
3155 if (boundary > BITS_PER_UNIT)
3156 {
3157 if (offset_ptr->var)
3158 {
3159 offset_ptr->var =
3160#ifdef ARGS_GROW_DOWNWARD
3161 round_down
3162#else
3163 round_up
3164#endif
3165 (ARGS_SIZE_TREE (*offset_ptr),
3166 boundary / BITS_PER_UNIT);
3167 offset_ptr->constant = 0; /*?*/
3168 }
3169 else
3170 offset_ptr->constant =
3171#ifdef ARGS_GROW_DOWNWARD
3172 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3173#else
3174 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3175#endif
3176 }
3177}
3178
3179static void
3180pad_below (offset_ptr, passed_mode, sizetree)
3181 struct args_size *offset_ptr;
3182 enum machine_mode passed_mode;
3183 tree sizetree;
3184{
3185 if (passed_mode != BLKmode)
3186 {
3187 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3188 offset_ptr->constant
3189 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3190 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3191 - GET_MODE_SIZE (passed_mode));
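 /* E.g. (assuming a 32-bit PARM_BOUNDARY), an HImode parm of
    16 bits gets 32 / 8 - 2 = 2 bytes of padding below, leaving
    the value itself in the upper part of its rounded slot. */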
3192 }
3193 else
3194 {
3195 if (TREE_CODE (sizetree) != INTEGER_CST
3196 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3197 {
3198 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3199 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3200 /* Add it in. */
3201 ADD_PARM_SIZE (*offset_ptr, s2);
3202 SUB_PARM_SIZE (*offset_ptr, sizetree);
3203 }
3204 }
3205}
3206
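/* Round VALUE down to the nearest multiple of DIVISOR;
   e.g., a value of 13 with divisor 4 yields (13 / 4) * 4 = 12. */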
3207static tree
3208round_down (value, divisor)
3209 tree value;
3210 int divisor;
3211{
3212 return size_binop (MULT_EXPR,
3213 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3214 size_int (divisor));
3215}
3216\f
3217/* Walk the tree of blocks describing the binding levels within a function
3218 and warn about uninitialized variables.
3219 This is done after calling flow_analysis and before global_alloc
3220 clobbers the pseudo-regs to hard regs. */
3221
3222void
3223uninitialized_vars_warning (block)
3224 tree block;
3225{
3226 register tree decl, sub;
3227 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3228 {
3229 if (TREE_CODE (decl) == VAR_DECL
 3230 /* These warnings are unreliable for aggregates
3231 because assigning the fields one by one can fail to convince
3232 flow.c that the entire aggregate was initialized.
3233 Unions are troublesome because members may be shorter. */
3234 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3235 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3236 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3237 && DECL_RTL (decl) != 0
3238 && GET_CODE (DECL_RTL (decl)) == REG
3239 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3240 warning_with_decl (decl,
3241 "`%s' may be used uninitialized in this function");
3242 if (TREE_CODE (decl) == VAR_DECL
3243 && DECL_RTL (decl) != 0
3244 && GET_CODE (DECL_RTL (decl)) == REG
3245 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3246 warning_with_decl (decl,
3247 "variable `%s' may be clobbered by `longjmp'");
3248 }
3249 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3250 uninitialized_vars_warning (sub);
3251}
3252
3253/* Do the appropriate part of uninitialized_vars_warning
3254 but for arguments instead of local variables. */
3255
3256void
3257setjmp_args_warning (block)
3258 tree block;
3259{
3260 register tree decl;
3261 for (decl = DECL_ARGUMENTS (current_function_decl);
3262 decl; decl = TREE_CHAIN (decl))
3263 if (DECL_RTL (decl) != 0
3264 && GET_CODE (DECL_RTL (decl)) == REG
3265 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3266 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3267}
3268
 3269 /* If this function calls setjmp, put all vars into the stack
3270 unless they were declared `register'. */
3271
3272void
3273setjmp_protect (block)
3274 tree block;
3275{
3276 register tree decl, sub;
3277 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3278 if ((TREE_CODE (decl) == VAR_DECL
3279 || TREE_CODE (decl) == PARM_DECL)
3280 && DECL_RTL (decl) != 0
3281 && GET_CODE (DECL_RTL (decl)) == REG
 3282 /* If this variable came from an inline function, it must be
 3283 that its life doesn't overlap the setjmp. If there was a
 3284 setjmp in the function, it would already be in memory. We
 3285 must exclude such variables because their DECL_RTL might be
3286 set to strange things such as virtual_stack_vars_rtx. */
3287 && ! DECL_FROM_INLINE (decl)
3288 && (
3289#ifdef NON_SAVING_SETJMP
3290 /* If longjmp doesn't restore the registers,
3291 don't put anything in them. */
3292 NON_SAVING_SETJMP
3293 ||
3294#endif
 3295 ! DECL_REGISTER (decl)))
3296 put_var_into_stack (decl);
3297 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3298 setjmp_protect (sub);
3299}
3300\f
3301/* Like the previous function, but for args instead of local variables. */
3302
3303void
3304setjmp_protect_args ()
3305{
3306 register tree decl, sub;
3307 for (decl = DECL_ARGUMENTS (current_function_decl);
3308 decl; decl = TREE_CHAIN (decl))
3309 if ((TREE_CODE (decl) == VAR_DECL
3310 || TREE_CODE (decl) == PARM_DECL)
3311 && DECL_RTL (decl) != 0
3312 && GET_CODE (DECL_RTL (decl)) == REG
3313 && (
3314 /* If longjmp doesn't restore the registers,
3315 don't put anything in them. */
3316#ifdef NON_SAVING_SETJMP
3317 NON_SAVING_SETJMP
3318 ||
3319#endif
 3320 ! DECL_REGISTER (decl)))
3321 put_var_into_stack (decl);
3322}

/* Return the context-pointer register corresponding to DECL,
   or 0 if it does not need one.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  if (context == 0)
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
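
/* Illustrative sketch (assumed example, not from the original source):
   with GNU C nested functions

       int outer ()
       {
         int x;
         int middle ()
         {
           int inner () { return x; }   -- refers two levels up
           return inner ();
         }
         return middle ();
       }

   while `inner' is being compiled, context_display holds one
   (function, rtx) pair per enclosing function.  lookup_static_chain
   for a decl whose context is `outer' returns the rtx recorded for
   outer's frame, and returns virtual_stack_vars_rtx when the context
   is the function currently being compiled.  */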

/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
         out of that function's stack frame.

         There are two cases:  If a separate ap is needed, allocate a
         slot in the outer function for it and dereference it that way.
         This is correct even if the real ap is actually a pseudo.
         Otherwise, just adjust the offset from the frame pointer to
         compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
        fp->arg_pointer_save_area
          = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
         avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
        if (TREE_PURPOSE (link) == context)
          {
            base = RTL_EXPR_RTL (TREE_VALUE (link));
            break;
          }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
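
/* Worked example (assumed values, for illustration only): suppose VAR
   lives in the containing function at (plus (reg vfp) (const_int -12)).
   The address decodes as basereg = vfp, displacement = -12; the display
   entry for the containing function supplies BASE, and the result is
   plus_constant (base, -12), i.e. the same slot offset applied to the
   chained-to frame pointer instead of our own.  */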

/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
        {
          tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
                                    function);
          return round_trampoline_addr (tramp);
        }

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
        break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
         trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
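
/* Illustrative sketch (assumed example, not from the original source):
   a trampoline is needed whenever the address of a nested function
   escapes, e.g.

       void apply (void (*f) ());
       void outer ()
       {
         int count = 0;
         void bump () { count++; }
         apply (bump);          -- address taken: trampoline required
       }

   trampoline_address returns the address of a small stack-resident
   code stub for `bump' that loads outer's static chain and jumps to
   the real code, so an ordinary function pointer still reaches the
   right stack frame.  */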

/* Given a trampoline address,
   round it to a multiple of TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
                       GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
                       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
                        GEN_INT (- TRAMPOLINE_ALIGNMENT),
                        temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
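
/* Worked example (assuming a TRAMPOLINE_ALIGNMENT of 16, purely for
   illustration): the emitted RTL computes (addr + 15) & -16.  For
   addr = 0x1004 that is 0x1013 & ~0xf = 0x1010, the next 16-byte
   boundary; an already-aligned address such as 0x1010 maps to itself.
   This is the run-time counterpart of the compile-time CEIL_ROUND
   macro at the top of this file.  */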

/* The functions identify_blocks and reorder_blocks provide a way to
   reorder the tree of BLOCK nodes, for optimizers that reshuffle or
   duplicate portions of the RTL code.  Call identify_blocks before
   changing the RTL, and call reorder_blocks after.  */

static int all_blocks ();
static tree blocks_nreverse ();

/* Put all this function's BLOCK nodes into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are TOP_BLOCK, the top-level block of the function,
   and INSNS, the insn chain of the function.  */

tree *
identify_blocks (top_block, insns)
     tree top_block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;
  int depth = 0;
  int next_block_number = 0;
  int current_block_number = 0;
  rtx insn;

  if (top_block == 0)
    return 0;

  n_blocks = all_blocks (top_block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (top_block, block_vector);

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
          {
            block_stack[depth++] = current_block_number;
            current_block_number = next_block_number;
            NOTE_BLOCK_NUMBER (insn) = next_block_number++;
          }
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
          {
            current_block_number = block_stack[--depth];
            NOTE_BLOCK_NUMBER (insn) = current_block_number;
          }
      }

  return block_vector;
}
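
/* Usage sketch (illustrative only; `shuffle_insns_somehow' stands for
   any hypothetical pass that rearranges the insn chain):

       tree *map = identify_blocks (DECL_INITIAL (fndecl), get_insns ());
       shuffle_insns_somehow ();
       top = reorder_blocks (map, DECL_INITIAL (fndecl), get_insns ());
       free (map);

   identify_blocks numbers the BLOCK_BEG/BLOCK_END notes in preorder;
   after the RTL is rearranged, reorder_blocks rebuilds the BLOCK tree
   from the notes' new order, copying any block it encounters twice.  */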

/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below TOP_BLOCK.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, top_block, insns)
     tree *block_vector;
     tree top_block;
     rtx insns;
{
  tree current_block = top_block;
  rtx insn;

  if (block_vector == 0)
    return top_block;

  /* Prune the old tree away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
          {
            tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
            /* If we have seen this block before, copy it.  */
            if (TREE_ASM_WRITTEN (block))
              block = copy_node (block);
            BLOCK_SUBBLOCKS (block) = 0;
            TREE_ASM_WRITTEN (block) = 1;
            BLOCK_SUPERCONTEXT (block) = current_block;
            BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
            BLOCK_SUBBLOCKS (current_block) = block;
            current_block = block;
            NOTE_SOURCE_FILE (insn) = 0;
          }
        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
          {
            BLOCK_SUBBLOCKS (current_block)
              = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
            current_block = BLOCK_SUPERCONTEXT (current_block);
            NOTE_SOURCE_FILE (insn) = 0;
          }
      }

  return current_block;
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

static tree
blocks_nreverse (t)
     tree t;
{
  register tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      next = BLOCK_CHAIN (decl);
      BLOCK_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
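
/* Trace (for illustration): given a chain A -> B -> C, the loop
   rewrites the BLOCK_CHAIN links one node at a time:

       prev = 0:   A -> 0          (B -> C still reachable via `next')
       prev = A:   B -> A -> 0
       prev = B:   C -> B -> A -> 0

   and returns C, the old tail: the classic in-place reversal of a
   singly linked list, like nreverse for ordinary tree chains.  */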

/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
   Also clear TREE_ASM_WRITTEN in all blocks.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 1;
  tree subblocks;

  TREE_ASM_WRITTEN (block) = 0;
  /* Record this block.  */
  if (vector)
    vector[0] = block;

  /* Record the subblocks, and their subblocks.  */
  for (subblocks = BLOCK_SUBBLOCKS (block);
       subblocks; subblocks = BLOCK_CHAIN (subblocks))
    n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);

  return n_blocks;
}
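
/* Illustration (assumed block tree):

       B0
       +- B1
       |  +- B2
       +- B3

   all_blocks fills VECTOR with B0, B1, B2, B3 (preorder) and returns 4.
   These preorder indices are exactly the numbers identify_blocks stores
   in NOTE_BLOCK_NUMBER of the matching BLOCK_BEG/BLOCK_END notes, which
   is what lets reorder_blocks map notes back to blocks afterward.  */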

/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  char *junk;

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
        current_function_returns_pcc_struct = 1;
      else
#endif
        current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
          || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
          || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}

/* Indicate that the current function uses extra args
   not explicitly mentioned in the argument list in any fashion.  */

void
mark_varargs ()
{
  current_function_varargs = 1;
}

/* Expand a call to __main at the beginning of a possible main function.  */

void
expand_main_function ()
{
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
                     VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
}

/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.
         Initialize to 0 so the test below is safe when no address
         is available.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        DECL_RTL (DECL_RESULT (subr))
          = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
                     value_address);
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    /* If function will end with cleanup code for parms,
       compute the return values into a pseudo reg,
       which we will copy into the true return register
       after the cleanups are done.  */
    DECL_RTL (DECL_RESULT (subr))
      = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  /* Fetch static chain values for containing functions.  */
  tem = decl_function_context (current_function_decl);
  /* If not doing stupid register allocation, then start off with the static
     chain pointer in a pseudo register.  Otherwise, we use the stack
     address that was generated above.  */
  if (tem && ! obey_regdecls)
    last_ptr = copy_to_reg (static_chain_incoming_rtx);
  context_display = 0;
  while (tem)
    {
      tree rtlexp = make_node (RTL_EXPR);

      RTL_EXPR_RTL (rtlexp) = last_ptr;
      context_display = tree_cons (tem, rtlexp, context_display);
      tem = decl_function_context (tem);
      if (tem == 0)
        break;
      /* Chain thru stack frames, assuming pointer to next lexical frame
         is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
      last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
      last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
                                       memory_address (Pmode, last_ptr)));
    }

  /* The point after the display initializations is where the tail-recursion
     label should go, if we end up needing one.  Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
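
/* Sketch of the insn stream this builds (illustrative ordering only):

       NOTE_INSN_DELETED                -- from init_function_start
       (static chain / struct-value copies, parm setup from assign_parms)
       NOTE_INSN_FUNCTION_BEG           -- body starts here
       NOTE parm_birth_insn
       (display initializations for nested-function contexts)
       NOTE tail_recursion_reentry      -- trampolines go before this

   expand_function_end later inserts trampoline-initialization and
   arg-pointer-save code just before tail_recursion_reentry, which is
   why that NOTE must exist even when no tail-recursion label is ever
   made.  */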

/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.  */

/* It is up to language-specific callers to do cleanups for parameters.  */

void
expand_function_end (filename, line)
     char *filename;
     int line;
{
  register int i;
  tree link;

  static rtx initial_trampoline;

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      setjmp_protect (DECL_INITIAL (current_function_decl));
      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
                              round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
                       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
                             XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* Clear the trampoline_list for the next function.  */
  trampoline_list = 0;

#if 0 /* I think unused parms are legitimate enough.  */
  /* Warn about unused parms.  */
  if (warn_unused)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
          warning_with_decl (decl, "unused parameter `%s'");
    }
#endif

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
}

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
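
/* Illustration (assumed UIDs): for a prologue SEQUENCE of three insns
   whose INSN_UIDs are 41, 42 and 43, record_insns returns the vector

       { 41, 42, 43, 0 }

   i.e. a zero-terminated UID list, so later passes can still recognize
   the prologue insns after scheduling has moved them around.  */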

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
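
/* Usage sketch (illustrative): reposition_prologue_and_epilogue_notes
   below counts down against the recorded UID vector, roughly

       for (len = 0; prologue[len]; len++)
         ;
       ...
       else if ((len -= contains (insn, prologue)) == 0)
         -- every recorded prologue insn has now been seen

   Returning a count rather than a yes/no flag matters because after
   delayed branch scheduling a single SEQUENCE insn can contain several
   of the recorded insns at once.  */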

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
         prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
         if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
          && GET_CODE (basic_block_head[0]) != CODE_LABEL)
        basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
        {
          rtx tail, seq;

          /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
             the epilogue insns (this must include the jump insn that
             returns), USE insns at the end of a function, and a BARRIER.  */

          emit_barrier_after (insn);

          /* Place the epilogue before the USE insns at the end of a
             function.  */
          while (prev
                 && GET_CODE (prev) == INSN
                 && GET_CODE (PATTERN (prev)) == USE)
            {
              insn = PREV_INSN (prev);
              prev = prev_nonnote_insn (prev);
            }

          seq = gen_epilogue ();
          tail = emit_jump_insn_after (seq, insn);
          emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

          /* Include the new epilogue insns in the last block.  Ignore
             them if they form a basic block unto themselves.  */
          if (basic_block_end && n_basic_blocks
              && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
            basic_block_end[n_basic_blocks - 1] = tail;

          /* Retain a map of the epilogue insns.  */
          epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          return;
        }
    }
#endif
  epilogue = 0;
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; insn; insn = NEXT_INSN (insn))
            if (GET_CODE (insn) == NOTE)
              {
                if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                  note = insn;
              }
            else if ((len -= contains (insn, prologue)) == 0)
              {
                /* Find the prologue-end note if we haven't already, and
                   move it to just after the last prologue insn.  */
                if (note == 0)
                  for (note = insn; note = NEXT_INSN (note);)
                    if (GET_CODE (note) == NOTE
                        && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                      break;
                next = NEXT_INSN (note);
                prev = PREV_INSN (note);
                if (prev)
                  NEXT_INSN (prev) = next;
                if (next)
                  PREV_INSN (next) = prev;
                add_insn_after (note, insn);
                break;
              }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
            if (GET_CODE (insn) == NOTE)
              {
                if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                  note = insn;
              }
            else if ((len -= contains (insn, epilogue)) == 0)
              {
                /* Find the epilogue-begin note if we haven't already, and
                   move it to just before the first epilogue insn.  */
                if (note == 0)
                  for (note = insn; note = PREV_INSN (note);)
                    if (GET_CODE (note) == NOTE
                        && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                      break;
                next = NEXT_INSN (note);
                prev = PREV_INSN (note);
                if (prev)
                  NEXT_INSN (prev) = next;
                if (next)
                  PREV_INSN (next) = prev;
                add_insn_after (note, PREV_INSN (insn));
                break;
              }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}