]> gcc.gnu.org Git - gcc.git/blame - gcc/function.c
Initial revision
[gcc.git] / gcc / function.c
CommitLineData
6f086dfc 1/* Expands front end tree to back end RTL for GNU C-Compiler
c51d911a 2 Copyright (C) 1987, 88, 89, 91, 92, 1993 Free Software Foundation, Inc.
6f086dfc
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40#include "config.h"
41
42#include <stdio.h>
43
44#include "rtl.h"
45#include "tree.h"
46#include "flags.h"
47#include "function.h"
48#include "insn-flags.h"
49#include "expr.h"
50#include "insn-codes.h"
51#include "regs.h"
52#include "hard-reg-set.h"
53#include "insn-config.h"
54#include "recog.h"
55#include "output.h"
bdac5f58 56#include "basic-block.h"
6f086dfc 57
293e3de4
RS
58/* Some systems use __main in a way incompatible with its use in gcc, in these
59 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
60 give the same symbol without quotes for an alternative entry point. You
 61   must define both, or neither. */
62#ifndef NAME__MAIN
63#define NAME__MAIN "__main"
64#define SYMBOL__MAIN __main
65#endif
66
6f086dfc
RS
67/* Round a value to the lowest integer less than it that is a multiple of
68 the required alignment. Avoid using division in case the value is
69 negative. Assume the alignment is a power of two. */
70#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
71
72/* Similar, but round to the next highest integer that meets the
73 alignment. */
74#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
75
76/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
77 during rtl generation. If they are different register numbers, this is
78 always true. It may also be true if
79 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
80 generation. See fix_lexical_addr for details. */
81
82#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
83#define NEED_SEPARATE_AP
84#endif
85
86/* Number of bytes of args popped by function being compiled on its return.
87 Zero if no bytes are to be popped.
88 May affect compilation of return insn or of function epilogue. */
89
90int current_function_pops_args;
91
92/* Nonzero if function being compiled needs to be given an address
93 where the value should be stored. */
94
95int current_function_returns_struct;
96
97/* Nonzero if function being compiled needs to
98 return the address of where it has put a structure value. */
99
100int current_function_returns_pcc_struct;
101
102/* Nonzero if function being compiled needs to be passed a static chain. */
103
104int current_function_needs_context;
105
106/* Nonzero if function being compiled can call setjmp. */
107
108int current_function_calls_setjmp;
109
110/* Nonzero if function being compiled can call longjmp. */
111
112int current_function_calls_longjmp;
113
114/* Nonzero if function being compiled receives nonlocal gotos
115 from nested functions. */
116
117int current_function_has_nonlocal_label;
118
119/* Nonzero if function being compiled contains nested functions. */
120
121int current_function_contains_functions;
122
123/* Nonzero if function being compiled can call alloca,
124 either as a subroutine or builtin. */
125
126int current_function_calls_alloca;
127
128/* Nonzero if the current function returns a pointer type */
129
130int current_function_returns_pointer;
131
132/* If some insns can be deferred to the delay slots of the epilogue, the
133 delay list for them is recorded here. */
134
135rtx current_function_epilogue_delay_list;
136
137/* If function's args have a fixed size, this is that size, in bytes.
138 Otherwise, it is -1.
139 May affect compilation of return insn or of function epilogue. */
140
141int current_function_args_size;
142
143/* # bytes the prologue should push and pretend that the caller pushed them.
144 The prologue must do this, but only if parms can be passed in registers. */
145
146int current_function_pretend_args_size;
147
148/* # of bytes of outgoing arguments required to be pushed by the prologue.
149 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
150 and no stack adjusts will be done on function calls. */
151
152int current_function_outgoing_args_size;
153
154/* This is the offset from the arg pointer to the place where the first
155 anonymous arg can be found, if there is one. */
156
157rtx current_function_arg_offset_rtx;
158
159/* Nonzero if current function uses varargs.h or equivalent.
160 Zero for functions that use stdarg.h. */
161
162int current_function_varargs;
163
164/* Quantities of various kinds of registers
165 used for the current function's args. */
166
167CUMULATIVE_ARGS current_function_args_info;
168
169/* Name of function now being compiled. */
170
171char *current_function_name;
172
173/* If non-zero, an RTL expression for that location at which the current
174 function returns its result. Always equal to
175 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
176 independently of the tree structures. */
177
178rtx current_function_return_rtx;
179
180/* Nonzero if the current function uses the constant pool. */
181
182int current_function_uses_const_pool;
183
184/* Nonzero if the current function uses pic_offset_table_rtx. */
185int current_function_uses_pic_offset_table;
186
187/* The arg pointer hard register, or the pseudo into which it was copied. */
188rtx current_function_internal_arg_pointer;
189
190/* The FUNCTION_DECL for an inline function currently being expanded. */
191tree inline_function_decl;
192
193/* Number of function calls seen so far in current function. */
194
195int function_call_count;
196
197/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
198 (labels to which there can be nonlocal gotos from nested functions)
199 in this function. */
200
201tree nonlocal_labels;
202
203/* RTX for stack slot that holds the current handler for nonlocal gotos.
204 Zero when function does not have nonlocal labels. */
205
206rtx nonlocal_goto_handler_slot;
207
208/* RTX for stack slot that holds the stack pointer value to restore
209 for a nonlocal goto.
210 Zero when function does not have nonlocal labels. */
211
212rtx nonlocal_goto_stack_level;
213
214/* Label that will go on parm cleanup code, if any.
215 Jumping to this label runs cleanup code for parameters, if
216 such code must be run. Following this code is the logical return label. */
217
218rtx cleanup_label;
219
220/* Label that will go on function epilogue.
221 Jumping to this label serves as a "return" instruction
222 on machines which require execution of the epilogue on all returns. */
223
224rtx return_label;
225
226/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
227 So we can mark them all live at the end of the function, if nonopt. */
228rtx save_expr_regs;
229
230/* List (chain of EXPR_LISTs) of all stack slots in this function.
231 Made for the sake of unshare_all_rtl. */
232rtx stack_slot_list;
233
234/* Chain of all RTL_EXPRs that have insns in them. */
235tree rtl_expr_chain;
236
237/* Label to jump back to for tail recursion, or 0 if we have
238 not yet needed one for this function. */
239rtx tail_recursion_label;
240
241/* Place after which to insert the tail_recursion_label if we need one. */
242rtx tail_recursion_reentry;
243
244/* Location at which to save the argument pointer if it will need to be
245 referenced. There are two cases where this is done: if nonlocal gotos
246 exist, or if vars stored at an offset from the argument pointer will be
247 needed by inner routines. */
248
249rtx arg_pointer_save_area;
250
251/* Offset to end of allocated area of stack frame.
252 If stack grows down, this is the address of the last stack slot allocated.
253 If stack grows up, this is the address for the next slot. */
254int frame_offset;
255
256/* List (chain of TREE_LISTs) of static chains for containing functions.
257 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
258 in an RTL_EXPR in the TREE_VALUE. */
259static tree context_display;
260
261/* List (chain of TREE_LISTs) of trampolines for nested functions.
262 The trampoline sets up the static chain and jumps to the function.
263 We supply the trampoline's address when the function's address is requested.
264
265 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
266 in an RTL_EXPR in the TREE_VALUE. */
267static tree trampoline_list;
268
269/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
270static rtx parm_birth_insn;
271
272#if 0
273/* Nonzero if a stack slot has been generated whose address is not
274 actually valid. It means that the generated rtl must all be scanned
275 to detect and correct the invalid addresses where they occur. */
276static int invalid_stack_slot;
277#endif
278
279/* Last insn of those whose job was to put parms into their nominal homes. */
280static rtx last_parm_insn;
281
282/* 1 + last pseudo register number used for loading a copy
283 of a parameter of this function. */
284static int max_parm_reg;
285
286/* Vector indexed by REGNO, containing location on stack in which
287 to put the parm which is nominally in pseudo register REGNO,
288 if we discover that that parm must go in the stack. */
289static rtx *parm_reg_stack_loc;
290
291#if 0 /* Turned off because 0 seems to work just as well. */
292/* Cleanup lists are required for binding levels regardless of whether
293 that binding level has cleanups or not. This node serves as the
294 cleanup list whenever an empty list is required. */
295static tree empty_cleanup_list;
296#endif
297
298/* Nonzero once virtual register instantiation has been done.
299 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
300static int virtuals_instantiated;
301
302/* Nonzero if we need to distinguish between the return value of this function
303 and the return value of a function called by this function. This helps
304 integrate.c */
305
306extern int rtx_equal_function_value_matters;
307
308void fixup_gotos ();
309
310static tree round_down ();
311static rtx round_trampoline_addr ();
312static rtx fixup_stack_1 ();
293e3de4 313static void put_reg_into_stack ();
6f086dfc
RS
314static void fixup_var_refs ();
315static void fixup_var_refs_insns ();
316static void fixup_var_refs_1 ();
317static void optimize_bit_field ();
318static void instantiate_decls ();
319static void instantiate_decls_1 ();
5a73491b 320static void instantiate_decl ();
6f086dfc
RS
321static int instantiate_virtual_regs_1 ();
322static rtx fixup_memory_subreg ();
323static rtx walk_fixup_memory_subreg ();
324\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx (a MEM) used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
366\f
367/* Pointer to chain of `struct function' for containing functions. */
368struct function *outer_function_chain;
369
370/* Given a function decl for a containing function,
371 return the `struct function' for it. */
372
373struct function *
374find_function_data (decl)
375 tree decl;
376{
377 struct function *p;
378 for (p = outer_function_chain; p; p = p->next)
379 if (p->decl == decl)
380 return p;
381 abort ();
382}
383
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  /* Link the save record onto the chain searched by find_function_data
     and unwound by pop_function_context.  */
  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Copy every language-independent piece of per-function state into
     the save record, one field per global.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  /* Starts empty; pop_function_context drains any entries queued while
     compiling the nested function.  */
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  /* Let each subsystem save its own status.  init_emit then resets the
     insn-emission machinery for the nested function about to be compiled.  */
  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);
}
447
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  /* Restore every language-independent piece of per-function state
     that push_function_context saved.  */
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  /* We just finished compiling a function nested inside this one,
     so this one is now known to contain functions.  */
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;

  /* Let each subsystem restore its own status.  */
  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
521\f
522/* Allocate fixed slots in the stack frame of the current function. */
523
524/* Return size needed for stack frame based on slots so far allocated.
525 This size counts from zero. It is not rounded to STACK_BOUNDARY;
526 the caller may have to do that. */
527
528int
529get_frame_size ()
530{
531#ifdef FRAME_GROWS_DOWNWARD
532 return -frame_offset;
533#else
534 return frame_offset;
535#endif
536}
537
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Determine the alignment, in bytes, that the slot's address must have.  */
  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  Otherwise use the
     virtual stack-variables register, to be instantiated later.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  /* Record the slot so unshare_all_rtl can find it.  */
  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
614
/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  /* Determine the alignment, in bytes, for the slot's address,
     exactly as in assign_stack_local.  */
  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  /* Always address relative to the virtual stack-variables register;
     instantiation happens when the containing function is compiled.  */
  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  /* Record the slot on the containing function's list of stack slots.  */
  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
681\f
682/* Allocate a temporary stack slot and record it for possible later
683 reuse.
684
685 MODE is the machine mode to be given to the returned rtx.
686
687 SIZE is the size in units of the space required. We do no rounding here
688 since assign_stack_local will do any required rounding.
689
690 KEEP is non-zero if this slot is to be retained after a call to
691 free_temp_slots. Automatic variables for a block are allocated with this
692 flag. */
693
694rtx
695assign_stack_temp (mode, size, keep)
696 enum machine_mode mode;
697 int size;
698 int keep;
699{
700 struct temp_slot *p, *best_p = 0;
701
702 /* First try to find an available, already-allocated temporary that is the
703 exact size we require. */
704 for (p = temp_slots; p; p = p->next)
705 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
706 break;
707
708 /* If we didn't find, one, try one that is larger than what we want. We
709 find the smallest such. */
710 if (p == 0)
711 for (p = temp_slots; p; p = p->next)
712 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
713 && (best_p == 0 || best_p->size > p->size))
714 best_p = p;
715
716 /* Make our best, if any, the one to use. */
717 if (best_p)
718 p = best_p;
719
720 /* If we still didn't find one, make a new temporary. */
721 if (p == 0)
722 {
723 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
724 p->size = size;
725 /* If the temp slot mode doesn't indicate the alignment,
726 use the largest possible, so no one will be disappointed. */
727 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
728 p->next = temp_slots;
729 temp_slots = p;
730 }
731
732 p->in_use = 1;
733 p->level = temp_slot_level;
734 p->keep = keep;
735 return p->slot;
736}
737\f
738/* If X could be a reference to a temporary slot, mark that slot as belonging
739 to the to one level higher. If X matched one of our slots, just mark that
740 one. Otherwise, we can't easily predict which it is, so upgrade all of
741 them. Kept slots need not be touched.
742
743 This is called when an ({...}) construct occurs and a statement
744 returns a value in memory. */
745
746void
747preserve_temp_slots (x)
748 rtx x;
749{
750 struct temp_slot *p;
751
752 /* If X is not in memory or is at a constant address, it cannot be in
753 a temporary slot. */
754 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
755 return;
756
757 /* First see if we can find a match. */
758 for (p = temp_slots; p; p = p->next)
759 if (p->in_use && x == p->slot)
760 {
761 p->level--;
762 return;
763 }
764
765 /* Otherwise, preserve all non-kept slots at this level. */
766 for (p = temp_slots; p; p = p->next)
767 if (p->in_use && p->level == temp_slot_level && ! p->keep)
768 p->level--;
769}
770
771/* Free all temporaries used so far. This is normally called at the end
772 of generating code for a statement. */
773
774void
775free_temp_slots ()
776{
777 struct temp_slot *p;
778
779 for (p = temp_slots; p; p = p->next)
780 if (p->in_use && p->level == temp_slot_level && ! p->keep)
781 p->in_use = 0;
782}
783
784/* Push deeper into the nesting level for stack temporaries. */
785
786void
787push_temp_slots ()
788{
789 /* For GNU C++, we must allow a sequence to be emitted anywhere in
790 the level where the sequence was started. By not changing levels
791 when the compiler is inside a sequence, the temporaries for the
792 sequence and the temporaries will not unwittingly conflict with
793 the temporaries for other sequences and/or code at that level. */
794 if (in_sequence_p ())
795 return;
796
797 temp_slot_level++;
798}
799
800/* Pop a temporary nesting level. All slots in use in the current level
801 are freed. */
802
803void
804pop_temp_slots ()
805{
806 struct temp_slot *p;
807
808 /* See comment in push_temp_slots about why we don't change levels
809 in sequences. */
810 if (in_sequence_p ())
811 return;
812
813 for (p = temp_slots; p; p = p->next)
814 if (p->in_use && p->level == temp_slot_level)
815 p->in_use = 0;
816
817 temp_slot_level--;
818}
819\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      /* Work on the address pseudo instead of the object itself.  */
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
			promoted_mode, decl_mode);
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef STACK_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1),
			  part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 0),
			  part_type, part_mode, part_mode);
#else
      put_reg_into_stack (function, XEXP (reg, 0),
			  part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 1),
			  part_type, part_mode, part_mode);
#endif

      /* Change the CONCAT into a combined MEM for both parts, mutating
	 the rtx in place so existing references see the new form.  */
      PUT_CODE (reg, MEM);
      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
899
/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.

   TYPE is the tree type of the variable; it supplies the signedness used
   when later re-widening references, and tells whether the slot holds an
   aggregate.  REG itself is mutated in place into a MEM.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
{
  rtx new = 0;

  /* Find or create the stack slot: reuse a parm's known home if REG is a
     parameter register, otherwise allocate a fresh slot in the proper
     function's frame.  */
  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
	new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
	new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  /* Turn REG into a MEM in place, stealing the slot's address.  Note that
     XEXP (reg, 0) is stored while REG is still a REG; the PUT_CODE below
     reinterprets that slot as the MEM's address operand.  */
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (type) == ARRAY_TYPE
       || TREE_CODE (type) == RECORD_TYPE
       || TREE_CODE (type) == UNION_TYPE
       || TREE_CODE (type) == QUAL_UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.
         The queue entry must live on the owning function's obstack, hence
         the push/pop around the allocation.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);
      temp
	= (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
966\f
967static void
00d8a4c1 968fixup_var_refs (var, promoted_mode, unsignedp)
6f086dfc 969 rtx var;
00d8a4c1
RK
970 enum machine_mode promoted_mode;
971 int unsignedp;
6f086dfc
RS
972{
973 tree pending;
974 rtx first_insn = get_insns ();
975 struct sequence_stack *stack = sequence_stack;
976 tree rtl_exps = rtl_expr_chain;
977
978 /* Must scan all insns for stack-refs that exceed the limit. */
00d8a4c1 979 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
6f086dfc
RS
980
981 /* Scan all pending sequences too. */
982 for (; stack; stack = stack->next)
983 {
984 push_to_sequence (stack->first);
00d8a4c1
RK
985 fixup_var_refs_insns (var, promoted_mode, unsignedp,
986 stack->first, stack->next != 0);
6f086dfc
RS
987 /* Update remembered end of sequence
988 in case we added an insn at the end. */
989 stack->last = get_last_insn ();
990 end_sequence ();
991 }
992
993 /* Scan all waiting RTL_EXPRs too. */
994 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
995 {
996 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
997 if (seq != const0_rtx && seq != 0)
998 {
999 push_to_sequence (seq);
00d8a4c1 1000 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
6f086dfc
RS
1001 end_sequence ();
1002 }
1003 }
1004}
1005\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;			/* Rtx being replaced: VAR, a SUBREG of VAR,
				   or a MEM containing VAR.  */
  rtx new;			/* Chosen replacement (pseudo or new MEM),
				   or 0 if none has been picked yet.  */
  struct fixup_replacement *next;  /* Next entry in this insn's list.  */
};
1018
1019/* REPLACEMENTS is a pointer to a list of the above structures and X is
1020 some part of an insn. Return a struct fixup_replacement whose OLD
1021 value is equal to X. Allocate a new structure if no such entry exists. */
1022
1023static struct fixup_replacement *
2740a678 1024find_fixup_replacement (replacements, x)
6f086dfc
RS
1025 struct fixup_replacement **replacements;
1026 rtx x;
1027{
1028 struct fixup_replacement *p;
1029
1030 /* See if we have already replaced this. */
1031 for (p = *replacements; p && p->old != x; p = p->next)
1032 ;
1033
1034 if (p == 0)
1035 {
1036 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1037 p->old = x;
1038 p->new = 0;
1039 p->next = *replacements;
1040 *replacements = p;
1041 }
1042
1043 return p;
1044}
1045
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.

   VAR is a MEM that was formerly a pseudo in mode PROMOTED_MODE;
   UNSIGNEDP gives the signedness used when converting between the
   promoted mode and VAR's memory mode.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      /* Remember the successor now; INSN may be turned into a NOTE or have
	 insns emitted around it below.  */
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  if (toplevel
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == var
	      /* If this represents the result of an insn group,
		 don't delete the insn.  */
	      && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
	      && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  Turn the insn into a
		 deleted note instead.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
	      /* If the insn that copies the results of a CALL_INSN
		 into a pseudo now references VAR, we have to use an
		 intermediate pseudo since we want the life of the
		 return value register to be only a single insn.

		 If we don't use an intermediate pseudo, such things as
		 address computations to make the address of VAR valid
		 if it is not can be placed between the CALL_INSN and INSN.

		 To make sure this doesn't happen, we record the destination
		 of the CALL_INSN and see if the next insn uses both that
		 and VAR.  */

	      if (call_dest != 0 && GET_CODE (insn) == INSN
		  && reg_mentioned_p (var, PATTERN (insn))
		  && reg_mentioned_p (call_dest, PATTERN (insn)))
		{
		  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		  emit_insn_before (gen_move_insn (temp, call_dest), insn);

		  PATTERN (insn) = replace_rtx (PATTERN (insn),
						call_dest, temp);
		}

	      /* Track the destination of a CALL_INSN's value for the
		 check above on the following insn.  */
	      if (GET_CODE (insn) == CALL_INSN
		  && GET_CODE (PATTERN (insn)) == SET)
		call_dest = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (insn) == CALL_INSN
		       && GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		call_dest = 0;
#endif

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      /* Emit the loads of any replacement pseudos recorded above.  */
	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      /* We can not separate USE insns from the CALL_INSN
			 that they belong to.  If this is a CALL_INSN, insert
			 the move insn before the USE insns preceding it
			 instead of immediately before the insn.  */
		      if (GET_CODE (insn) == CALL_INSN)
			{
			  insert_before = insn;
			  while (GET_CODE (PREV_INSN (insert_before)) == INSN
				 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
			    insert_before = PREV_INSN (insert_before);
			}
		      else
			insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}
      insn = next;
    }
}
1210\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  /* Insn not valid as a MEM: use a fresh pseudo; our caller emits
	     the load from VAR into it.  */
	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* Leaf rtx codes cannot contain VAR; nothing to do.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    tem = fixup_memory_subreg (tem, insn, 1);
	  tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      /* NOTE(review): `width' is set but never used below.  */
	      int width = INTVAL (XEXP (x, 1));
	      int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  int offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		  offset = (GET_MODE_SIZE (is_mode)
			    - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx (MEM, wanted_mode,
				    plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR. */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	/* Strip wrappers so DEST/SRC are the underlying objects.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 1);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    /* Mirror of the extract case above, but for the insv store
	       pattern: narrow the MEM to the mode insv wants if the field
	       position and length are constant.  */
	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode
		  = insn_operand_mode[(int) CODE_FOR_insv][0];
		enum machine_mode is_mode = GET_MODE (tem);
		/* NOTE(review): `width' is set but never used below.  */
		int width = INTVAL (XEXP (outerdest, 1));
		int pos = INTVAL (XEXP (outerdest, 2));

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    int offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx (MEM, wanted_mode,
				      plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && x == single_set (PATTERN (insn)))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Symmetric case: copying a register INTO VAR.  */
	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && x == single_set (PATTERN (insn)))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
				? GET_MODE (fixeddest)
				: GET_MODE (SET_SRC (x)));

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
1702\f
1703/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1704 return an rtx (MEM:m1 newaddr) which is equivalent.
1705 If any insns must be emitted to compute NEWADDR, put them before INSN.
1706
1707 UNCRITICAL nonzero means accept paradoxical subregs.
ab6155b7 1708 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
6f086dfc
RS
1709
1710static rtx
1711fixup_memory_subreg (x, insn, uncritical)
1712 rtx x;
1713 rtx insn;
1714 int uncritical;
1715{
1716 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1717 rtx addr = XEXP (SUBREG_REG (x), 0);
1718 enum machine_mode mode = GET_MODE (x);
1719 rtx saved, result;
1720
1721 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1722 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1723 && ! uncritical)
1724 abort ();
1725
1726#if BYTES_BIG_ENDIAN
1727 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1728 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1729#endif
1730 addr = plus_constant (addr, offset);
1731 if (!flag_force_addr && memory_address_p (mode, addr))
1732 /* Shortcut if no insns need be emitted. */
1733 return change_address (SUBREG_REG (x), mode, addr);
1734 start_sequence ();
1735 result = change_address (SUBREG_REG (x), mode, addr);
1736 emit_insn_before (gen_sequence (), insn);
1737 end_sequence ();
1738 return result;
1739}
1740
1741/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1742 Replace subexpressions of X in place.
1743 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1744 Otherwise return X, with its contents possibly altered.
1745
ab6155b7
RK
1746 If any insns must be emitted to compute NEWADDR, put them before INSN.
1747
1748 UNCRITICAL is as in fixup_memory_subreg. */
6f086dfc
RS
1749
1750static rtx
ab6155b7 1751walk_fixup_memory_subreg (x, insn, uncritical)
6f086dfc
RS
1752 register rtx x;
1753 rtx insn;
ab6155b7 1754 int uncritical;
6f086dfc
RS
1755{
1756 register enum rtx_code code;
1757 register char *fmt;
1758 register int i;
1759
1760 if (x == 0)
1761 return 0;
1762
1763 code = GET_CODE (x);
1764
1765 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
ab6155b7 1766 return fixup_memory_subreg (x, insn, uncritical);
6f086dfc
RS
1767
1768 /* Nothing special about this RTX; fix its operands. */
1769
1770 fmt = GET_RTX_FORMAT (code);
1771 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1772 {
1773 if (fmt[i] == 'e')
ab6155b7 1774 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
6f086dfc
RS
1775 if (fmt[i] == 'E')
1776 {
1777 register int j;
1778 for (j = 0; j < XVECLEN (x, i); j++)
1779 XVECEXP (x, i, j)
ab6155b7 1780 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
6f086dfc
RS
1781 }
1782 }
1783 return x;
1784}
1785\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.

   NOTE(review): this function is compiled out (#if 0) and has no callers
   in the visible code; kept for reference only.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	    || GET_CODE (insn) == JUMP_INSN)
	  fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif
1807
/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
	 (displacement is too large), compute the sum in a register.
	 The address must be (plus REG CONST_INT) where REG is either a
	 virtual register or the function's internal arg pointer.  */
      if (GET_CODE (ad) == PLUS
	  && GET_CODE (XEXP (ad, 0)) == REG
	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
	{
	  rtx temp, seq;
	  /* If the address is in fact valid for this MEM's mode,
	     leave the MEM untouched.  */
	  if (memory_address_p (GET_MODE (x), ad))
	    return x;

	  /* Load the whole address into a register ahead of INSN and
	     return a fresh MEM (not the possibly-shared original)
	     that addresses through that register.  */
	  start_sequence ();
	  temp = copy_to_reg (ad);
	  seq = gen_sequence ();
	  end_sequence ();
	  emit_insn_before (seq, insn);
	  return change_address (x, VOIDmode, temp);
	}
      return x;
    }

  /* Not a MEM: recurse on every subexpression and vector element,
     substituting each result in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
	}
    }
  return x;
}
1863\f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;			/* nonzero if the bit-field is SET_DEST (insertion) */
  rtx seq = 0;			/* conversion insns to emit if the change succeeds */
  enum machine_mode mode;

  /* The bit-field reference is either the destination (insertion)
     or the source (extraction) of BODY.  */
  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.
     Operand 1 of the extract is the width, operand 2 the bit position.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset = INTVAL (XEXP (bitfield, 2));
	  /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
	  offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
	      offset -= (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			 - MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (memref))));
#endif
	    }

	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));

	  /* Store this memory reference where
	     we found the bit field reference.
	     All changes are queued (group mode) and applied atomically
	     by apply_change_group below.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  /* Make the source's mode match the narrow MEM,
		     first stripping no-op low-part SUBREGs.  */
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      /* Strip no-op low-part SUBREGs from the destination.  */
	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0)
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.
		     Zero-extend for ZERO_EXTRACT, else sign-extend.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
2015\f
2016/* These routines are responsible for converting virtual register references
2017 to the actual hard register references once RTL generation is complete.
2018
2019 The following four variables are used for communication between the
2020 routines. They contain the offsets of the virtual registers from their
2021 respective hard registers. */
2022
static int in_arg_offset;	/* virtual_incoming_args_rtx from arg_pointer_rtx */
static int var_offset;		/* virtual_stack_vars_rtx from frame_pointer_rtx */
static int dynamic_offset;	/* virtual_stack_dynamic_rtx from stack_pointer_rtx */
static int out_arg_offset;	/* virtual_outgoing_args_rtx from stack_pointer_rtx */
2027
2028/* In most machines, the stack pointer register is equivalent to the bottom
2029 of the stack. */
2030
2031#ifndef STACK_POINTER_OFFSET
2032#define STACK_POINTER_OFFSET 0
2033#endif
2034
2035/* If not defined, pick an appropriate default for the offset of dynamically
2036 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2037 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2038
2039#ifndef STACK_DYNAMIC_OFFSET
2040
2041#ifdef ACCUMULATE_OUTGOING_ARGS
2042/* The bottom of the stack points to the actual arguments. If
2043 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2044 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2045 stack space for register parameters is not pushed by the caller, but
2046 rather part of the fixed stack areas and hence not included in
2047 `current_function_outgoing_args_size'. Nevertheless, we must allow
2048 for it when allocating stack dynamic objects. */
2049
2050#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2051#define STACK_DYNAMIC_OFFSET(FNDECL) \
2052(current_function_outgoing_args_size \
2053 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2054
2055#else
2056#define STACK_DYNAMIC_OFFSET(FNDECL) \
2057(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2058#endif
2059
2060#else
2061#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2062#endif
2063#endif
2064
2065/* Pass through the INSNS of function FNDECL and convert virtual register
2066 references to hard register references. */
2067
2068void
2069instantiate_virtual_regs (fndecl, insns)
2070 tree fndecl;
2071 rtx insns;
2072{
2073 rtx insn;
2074
2075 /* Compute the offsets to use for this function. */
2076 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2077 var_offset = STARTING_FRAME_OFFSET;
2078 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2079 out_arg_offset = STACK_POINTER_OFFSET;
2080
2081 /* Scan all variables and parameters of this function. For each that is
2082 in memory, instantiate all virtual registers if the result is a valid
2083 address. If not, we do it later. That will handle most uses of virtual
2084 regs on many machines. */
2085 instantiate_decls (fndecl, 1);
2086
2087 /* Initialize recognition, indicating that volatile is OK. */
2088 init_recog ();
2089
2090 /* Scan through all the insns, instantiating every virtual register still
2091 present. */
2092 for (insn = insns; insn; insn = NEXT_INSN (insn))
2093 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2094 || GET_CODE (insn) == CALL_INSN)
2095 {
2096 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
5f4f0e22 2097 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
6f086dfc
RS
2098 }
2099
2100 /* Now instantiate the remaining register equivalences for debugging info.
2101 These will not be valid addresses. */
2102 instantiate_decls (fndecl, 0);
2103
2104 /* Indicate that, from now on, assign_stack_local should use
2105 frame_pointer_rtx. */
2106 virtuals_instantiated = 1;
2107}
2108
2109/* Scan all decls in FNDECL (both variables and parameters) and instantiate
2110 all virtual registers in their DECL_RTL's.
2111
2112 If VALID_ONLY, do this only if the resulting address is still valid.
2113 Otherwise, always do it. */
2114
2115static void
2116instantiate_decls (fndecl, valid_only)
2117 tree fndecl;
2118 int valid_only;
2119{
2120 tree decl;
2121
a82ad570 2122 if (DECL_INLINE (fndecl))
6f086dfc
RS
2123 /* When compiling an inline function, the obstack used for
2124 rtl allocation is the maybepermanent_obstack. Calling
2125 `resume_temporary_allocation' switches us back to that
2126 obstack while we process this function's parameters. */
2127 resume_temporary_allocation ();
2128
2129 /* Process all parameters of the function. */
2130 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2131 {
5a73491b
RK
2132 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2133 valid_only);
2134 instantiate_decl (DECL_INCOMING_RTL (decl),
2135 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
6f086dfc
RS
2136 }
2137
2138 /* Now process all variables defined in the function or its subblocks. */
2139 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2140
a82ad570 2141 if (DECL_INLINE (fndecl))
6f086dfc
RS
2142 {
2143 /* Save all rtl allocated for this function by raising the
2144 high-water mark on the maybepermanent_obstack. */
2145 preserve_data ();
2146 /* All further rtl allocation is now done in the current_obstack. */
2147 rtl_in_current_obstack ();
2148 }
2149}
2150
2151/* Subroutine of instantiate_decls: Process all decls in the given
2152 BLOCK node and all its subblocks. */
2153
2154static void
2155instantiate_decls_1 (let, valid_only)
2156 tree let;
2157 int valid_only;
2158{
2159 tree t;
2160
2161 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
5a73491b
RK
2162 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2163 valid_only);
6f086dfc
RS
2164
2165 /* Process all subblocks. */
2166 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2167 instantiate_decls_1 (t, valid_only);
2168}
5a73491b 2169
8008b228 2170/* Subroutine of the preceding procedures: Given RTL representing a
5a73491b
RK
2171 decl and the size of the object, do any instantiation required.
2172
2173 If VALID_ONLY is non-zero, it means that the RTL should only be
2174 changed if the new address is valid. */
2175
2176static void
2177instantiate_decl (x, size, valid_only)
2178 rtx x;
2179 int size;
2180 int valid_only;
2181{
2182 enum machine_mode mode;
2183 rtx addr;
2184
2185 /* If this is not a MEM, no need to do anything. Similarly if the
2186 address is a constant or a register that is not a virtual register. */
2187
2188 if (x == 0 || GET_CODE (x) != MEM)
2189 return;
2190
2191 addr = XEXP (x, 0);
2192 if (CONSTANT_P (addr)
2193 || (GET_CODE (addr) == REG
2194 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2195 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2196 return;
2197
2198 /* If we should only do this if the address is valid, copy the address.
2199 We need to do this so we can undo any changes that might make the
2200 address invalid. This copy is unfortunate, but probably can't be
2201 avoided. */
2202
2203 if (valid_only)
2204 addr = copy_rtx (addr);
2205
2206 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2207
2208 if (! valid_only)
2209 return;
2210
2211 /* Now verify that the resulting address is valid for every integer or
2212 floating-point mode up to and including SIZE bytes long. We do this
2213 since the object might be accessed in any mode and frame addresses
2214 are shared. */
2215
2216 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2217 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2218 mode = GET_MODE_WIDER_MODE (mode))
2219 if (! memory_address_p (mode, addr))
2220 return;
2221
2222 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2223 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2224 mode = GET_MODE_WIDER_MODE (mode))
2225 if (! memory_address_p (mode, addr))
2226 return;
2227
2228 /* Otherwise, put back the address, now that we have updated it and we
2229 know it is valid. */
2230
2231 XEXP (x, 0) = addr;
2232}
6f086dfc
RS
2233\f
/* Given a pointer to a piece of rtx and an optional pointer to the
   containing object, instantiate any virtual registers present in it.

   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing if
   replacement is not valid.

   Return 1 if we either had nothing to do or if we were able to do the
   needed replacement.  Return 0 otherwise; we only return zero if
   EXTRA_INSNS is zero.

   We first try some simple transformations to avoid the creation of extra
   pseudos.  */

static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;			/* hard reg replacing a virtual reg, if any */
  int offset;			/* displacement that goes with NEW */
  rtx temp;
  rtx seq;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    /* Leaf codes that can never contain a virtual register.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  /* Compute source + offset ahead of OBJECT, then rewrite the
	     SET to store that value into the hard register.  */
	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), NULL_RTX);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  if (!validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  /* Tentatively replace the virtual register in place; OLD lets
	     us undo this if validation fails below.  */
	  old = XEXP (x, 0);
	  XEXP (x, 0) = new;
	  new = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with its
	     first operand.  */
	  if (new == const0_rtx
	      && validate_change (object, loc, XEXP (x, 0), 0))
	    return 1;

	  /* Next try to replace constant with new one.  */
	  if (!validate_change (object, &XEXP (x, 1), new, 0))
	    {
	      if (! extra_insns)
		{
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
		  XEXP (x, 0) = old;

		  start_sequence ();
		  temp = force_operand (new, NULL_RTX);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV: case UDIV:
    case MOD: case UMOD:
    case AND: case IOR: case XOR:
    case LSHIFT: case ASHIFT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ROTATERT:
    case NE: case EQ:
    case GE: case GT: case GEU: case GTU:
    case LE: case LT: case LEU: case LTU:
      /* Recurse on operand 1 unless it is constant, then iterate
	 (via the restart label) on operand 0.  */
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of regno_reg_rtx.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case USE:
    case CLOBBER:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG: case NOT:
    case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
    case SIGN_EXTEND: case ZERO_EXTEND:
    case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT: case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      /* Compute the sum ahead of OBJECT and substitute the
		 resulting pseudo for the virtual register.  */
	      start_sequence ();
	      temp = force_operand (temp, NULL_RTX);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
2586\f
2587/* Optimization: assuming this function does not receive nonlocal gotos,
2588 delete the handlers for such, as well as the insns to establish
2589 and disestablish them. */
2590
2591static void
2592delete_handlers ()
2593{
2594 rtx insn;
2595 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2596 {
2597 /* Delete the handler by turning off the flag that would
2598 prevent jump_optimize from deleting it.
2599 Also permit deletion of the nonlocal labels themselves
2600 if nothing local refers to them. */
2601 if (GET_CODE (insn) == CODE_LABEL)
2602 LABEL_PRESERVE_P (insn) = 0;
2603 if (GET_CODE (insn) == INSN
59257ff7
RK
2604 && ((nonlocal_goto_handler_slot != 0
2605 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2606 || (nonlocal_goto_stack_level != 0
2607 && reg_mentioned_p (nonlocal_goto_stack_level,
2608 PATTERN (insn)))))
6f086dfc
RS
2609 delete_insn (insn);
2610 }
2611}
2612
2613/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2614 of the current function. */
2615
2616rtx
2617nonlocal_label_rtx_list ()
2618{
2619 tree t;
2620 rtx x = 0;
2621
2622 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2623 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2624
2625 return x;
2626}
2627\f
2628/* Output a USE for any register use in RTL.
2629 This is used with -noreg to mark the extent of lifespan
2630 of any registers used in a user-visible variable's DECL_RTL. */
2631
2632void
2633use_variable (rtl)
2634 rtx rtl;
2635{
2636 if (GET_CODE (rtl) == REG)
2637 /* This is a register variable. */
2638 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2639 else if (GET_CODE (rtl) == MEM
2640 && GET_CODE (XEXP (rtl, 0)) == REG
2641 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2642 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2643 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2644 /* This is a variable-sized structure. */
2645 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2646}
2647
2648/* Like use_variable except that it outputs the USEs after INSN
2649 instead of at the end of the insn-chain. */
2650
2651void
2652use_variable_after (rtl, insn)
2653 rtx rtl, insn;
2654{
2655 if (GET_CODE (rtl) == REG)
2656 /* This is a register variable. */
2657 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2658 else if (GET_CODE (rtl) == MEM
2659 && GET_CODE (XEXP (rtl, 0)) == REG
2660 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2661 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2662 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2663 /* This is a variable-sized structure. */
2664 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2665}
2666\f
/* Return the value of the file-scope variable `max_parm_reg'.
   NOTE(review): judging by the name this is one more than the highest
   register number used for a parameter, but the variable is defined
   outside this excerpt -- confirm against its definition.  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
2672
2673/* Return the first insn following those generated by `assign_parms'. */
2674
2675rtx
2676get_first_nonparm_insn ()
2677{
2678 if (last_parm_insn)
2679 return NEXT_INSN (last_parm_insn);
2680 return get_insns ();
2681}
2682
5378192b
RS
2683/* Return the first NOTE_INSN_BLOCK_BEG note in the function.
2684 Crash if there is none. */
2685
2686rtx
2687get_first_block_beg ()
2688{
2689 register rtx searcher;
2690 register rtx insn = get_first_nonparm_insn ();
2691
2692 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
2693 if (GET_CODE (searcher) == NOTE
2694 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
2695 return searcher;
2696
2697 abort (); /* Invalid call to this function. (See comments above.) */
2698 return NULL_RTX;
2699}
2700
6f086dfc
RS
2701/* Return 1 if EXP returns an aggregate value, for which an address
2702 must be passed to the function or returned by the function. */
2703
2704int
2705aggregate_value_p (exp)
2706 tree exp;
2707{
9d790a4f
RS
2708 int i, regno, nregs;
2709 rtx reg;
6f086dfc
RS
2710 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2711 return 1;
2712 if (flag_pcc_struct_return
2713 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
c1b98a95 2714 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
2a64e92d
RK
2715 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
2716 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE))
6f086dfc 2717 return 1;
9d790a4f
RS
2718 /* Make sure we have suitable call-clobbered regs to return
2719 the value in; if not, we must return it in memory. */
2720 reg = hard_function_value (TREE_TYPE (exp), 0);
2721 regno = REGNO (reg);
2722 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp)));
2723 for (i = 0; i < nregs; i++)
2724 if (! call_used_regs[regno + i])
2725 return 1;
6f086dfc
RS
2726 return 0;
2727}
2728\f
2729/* Assign RTL expressions to the function's parameters.
2730 This may involve copying them into registers and using
2731 those registers as the RTL for them.
2732
2733 If SECOND_TIME is non-zero it means that this function is being
2734 called a second time. This is done by integrate.c when a function's
2735 compilation is deferred. We need to come back here in case the
2736 FUNCTION_ARG macro computes items needed for the rest of the compilation
2737 (such as changing which registers are fixed or caller-saved). But suppress
2738 writing any insns or setting DECL_RTL of anything in this case. */
2739
2740void
2741assign_parms (fndecl, second_time)
2742 tree fndecl;
2743 int second_time;
2744{
2745 register tree parm;
2746 register rtx entry_parm = 0;
2747 register rtx stack_parm = 0;
2748 CUMULATIVE_ARGS args_so_far;
a53e14c0 2749 enum machine_mode promoted_mode, passed_mode, nominal_mode;
00d8a4c1 2750 int unsignedp;
6f086dfc
RS
2751 /* Total space needed so far for args on the stack,
2752 given as a constant and a tree-expression. */
2753 struct args_size stack_args_size;
2754 tree fntype = TREE_TYPE (fndecl);
2755 tree fnargs = DECL_ARGUMENTS (fndecl);
2756 /* This is used for the arg pointer when referring to stack args. */
2757 rtx internal_arg_pointer;
2758 /* This is a dummy PARM_DECL that we used for the function result if
2759 the function returns a structure. */
2760 tree function_result_decl = 0;
2761 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2762 int varargs_setup = 0;
3412b298 2763 rtx conversion_insns = 0;
de3ab9df
RS
2764 /* FUNCTION_ARG may look at this variable. Since this is not
2765 expanding a call it will always be zero in this function. */
2766 int current_call_is_indirect = 0;
6f086dfc
RS
2767
2768 /* Nonzero if the last arg is named `__builtin_va_alist',
2769 which is used on some machines for old-fashioned non-ANSI varargs.h;
2770 this should be stuck onto the stack as if it had arrived there. */
2771 int vararg
2772 = (fnargs
2773 && (parm = tree_last (fnargs)) != 0
2774 && DECL_NAME (parm)
2775 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2776 "__builtin_va_alist")));
2777
2778 /* Nonzero if function takes extra anonymous args.
2779 This means the last named arg must be on the stack
2780 right before the anonymous ones. */
2781 int stdarg
2782 = (TYPE_ARG_TYPES (fntype) != 0
2783 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2784 != void_type_node));
2785
2786 /* If the reg that the virtual arg pointer will be translated into is
2787 not a fixed reg or is the stack pointer, make a copy of the virtual
2788 arg pointer, and address parms via the copy. The frame pointer is
2789 considered fixed even though it is not marked as such.
2790
2791 The second time through, simply use ap to avoid generating rtx. */
2792
2793 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2794 || ! (fixed_regs[ARG_POINTER_REGNUM]
2795 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2796 && ! second_time)
2797 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2798 else
2799 internal_arg_pointer = virtual_incoming_args_rtx;
2800 current_function_internal_arg_pointer = internal_arg_pointer;
2801
2802 stack_args_size.constant = 0;
2803 stack_args_size.var = 0;
2804
2805 /* If struct value address is treated as the first argument, make it so. */
2806 if (aggregate_value_p (DECL_RESULT (fndecl))
2807 && ! current_function_returns_pcc_struct
2808 && struct_value_incoming_rtx == 0)
2809 {
2810 tree type = build_pointer_type (fntype);
2811
5f4f0e22 2812 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
6f086dfc
RS
2813
2814 DECL_ARG_TYPE (function_result_decl) = type;
2815 TREE_CHAIN (function_result_decl) = fnargs;
2816 fnargs = function_result_decl;
2817 }
2818
2819 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2820 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2821
2822#ifdef INIT_CUMULATIVE_INCOMING_ARGS
ea0d4c4b 2823 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
6f086dfc 2824#else
ea0d4c4b 2825 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
6f086dfc
RS
2826#endif
2827
2828 /* We haven't yet found an argument that we must push and pretend the
2829 caller did. */
2830 current_function_pretend_args_size = 0;
2831
2832 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2833 {
2834 int aggregate
2835 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2836 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
c1b98a95
RK
2837 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE
2838 || TREE_CODE (TREE_TYPE (parm)) == QUAL_UNION_TYPE);
6f086dfc
RS
2839 struct args_size stack_offset;
2840 struct args_size arg_size;
2841 int passed_pointer = 0;
2842 tree passed_type = DECL_ARG_TYPE (parm);
2843
2844 /* Set LAST_NAMED if this is last named arg before some
2845 anonymous args. We treat it as if it were anonymous too. */
2846 int last_named = ((TREE_CHAIN (parm) == 0
2847 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2848 && (vararg || stdarg));
2849
2850 if (TREE_TYPE (parm) == error_mark_node
2851 /* This can happen after weird syntax errors
2852 or if an enum type is defined among the parms. */
2853 || TREE_CODE (parm) != PARM_DECL
2854 || passed_type == NULL)
2855 {
587cb682
TW
2856 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
2857 const0_rtx);
6f086dfc
RS
2858 TREE_USED (parm) = 1;
2859 continue;
2860 }
2861
2862 /* For varargs.h function, save info about regs and stack space
2863 used by the individual args, not including the va_alist arg. */
2864 if (vararg && last_named)
2865 current_function_args_info = args_so_far;
2866
2867 /* Find mode of arg as it is passed, and mode of arg
2868 as it should be during execution of this function. */
2869 passed_mode = TYPE_MODE (passed_type);
2870 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2871
16bae307
RS
2872 /* If the parm's mode is VOID, its value doesn't matter,
2873 and avoid the usual things like emit_move_insn that could crash. */
2874 if (nominal_mode == VOIDmode)
2875 {
2876 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2877 continue;
2878 }
2879
6f086dfc
RS
2880#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2881 /* See if this arg was passed by invisible reference. */
2882 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2883 passed_type, ! last_named))
2884 {
2885 passed_type = build_pointer_type (passed_type);
2886 passed_pointer = 1;
2887 passed_mode = nominal_mode = Pmode;
2888 }
2889#endif
2890
a53e14c0
RK
2891 promoted_mode = passed_mode;
2892
2893#ifdef PROMOTE_FUNCTION_ARGS
2894 /* Compute the mode in which the arg is actually extended to. */
2895 if (TREE_CODE (passed_type) == INTEGER_TYPE
2896 || TREE_CODE (passed_type) == ENUMERAL_TYPE
2897 || TREE_CODE (passed_type) == BOOLEAN_TYPE
2898 || TREE_CODE (passed_type) == CHAR_TYPE
2899 || TREE_CODE (passed_type) == REAL_TYPE
2900 || TREE_CODE (passed_type) == POINTER_TYPE
2901 || TREE_CODE (passed_type) == OFFSET_TYPE)
2902 {
2903 unsignedp = TREE_UNSIGNED (passed_type);
2904 PROMOTE_MODE (promoted_mode, unsignedp, passed_type);
2905 }
2906#endif
2907
6f086dfc
RS
2908 /* Let machine desc say which reg (if any) the parm arrives in.
2909 0 means it arrives on the stack. */
2910#ifdef FUNCTION_INCOMING_ARG
a53e14c0 2911 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
6f086dfc
RS
2912 passed_type, ! last_named);
2913#else
a53e14c0 2914 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
6f086dfc
RS
2915 passed_type, ! last_named);
2916#endif
2917
a53e14c0
RK
2918 if (entry_parm)
2919 passed_mode = promoted_mode;
2920
6f086dfc
RS
2921#ifdef SETUP_INCOMING_VARARGS
2922 /* If this is the last named parameter, do any required setup for
2923 varargs or stdargs. We need to know about the case of this being an
2924 addressable type, in which case we skip the registers it
2925 would have arrived in.
2926
2927 For stdargs, LAST_NAMED will be set for two parameters, the one that
2928 is actually the last named, and the dummy parameter. We only
2929 want to do this action once.
2930
2931 Also, indicate when RTL generation is to be suppressed. */
2932 if (last_named && !varargs_setup)
2933 {
2934 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2935 current_function_pretend_args_size,
2936 second_time);
2937 varargs_setup = 1;
2938 }
2939#endif
2940
2941 /* Determine parm's home in the stack,
2942 in case it arrives in the stack or we should pretend it did.
2943
2944 Compute the stack position and rtx where the argument arrives
2945 and its size.
2946
2947 There is one complexity here: If this was a parameter that would
2948 have been passed in registers, but wasn't only because it is
2949 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2950 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2951 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2952 0 as it was the previous time. */
2953
2954 locate_and_pad_parm (passed_mode, passed_type,
2955#ifdef STACK_PARMS_IN_REG_PARM_AREA
2956 1,
2957#else
2958#ifdef FUNCTION_INCOMING_ARG
2959 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2960 passed_type,
2961 (! last_named
2962 || varargs_setup)) != 0,
2963#else
2964 FUNCTION_ARG (args_so_far, passed_mode,
2965 passed_type,
2966 ! last_named || varargs_setup) != 0,
2967#endif
2968#endif
2969 fndecl, &stack_args_size, &stack_offset, &arg_size);
2970
2971 if (! second_time)
2972 {
2973 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2974
2975 if (offset_rtx == const0_rtx)
2976 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2977 else
2978 stack_parm = gen_rtx (MEM, passed_mode,
2979 gen_rtx (PLUS, Pmode,
2980 internal_arg_pointer, offset_rtx));
2981
2982 /* If this is a memory ref that contains aggregate components,
2983 mark it as such for cse and loop optimize. */
2984 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2985 }
2986
2987 /* If this parameter was passed both in registers and in the stack,
2988 use the copy on the stack. */
2989 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2990 entry_parm = 0;
2991
461beb10 2992#ifdef FUNCTION_ARG_PARTIAL_NREGS
6f086dfc
RS
2993 /* If this parm was passed part in regs and part in memory,
2994 pretend it arrived entirely in memory
2995 by pushing the register-part onto the stack.
2996
2997 In the special case of a DImode or DFmode that is split,
2998 we could put it together in a pseudoreg directly,
2999 but for now that's not worth bothering with. */
3000
3001 if (entry_parm)
3002 {
461beb10
DE
3003 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
3004 passed_type, ! last_named);
6f086dfc
RS
3005
3006 if (nregs > 0)
3007 {
3008 current_function_pretend_args_size
3009 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3010 / (PARM_BOUNDARY / BITS_PER_UNIT)
3011 * (PARM_BOUNDARY / BITS_PER_UNIT));
3012
3013 if (! second_time)
3014 move_block_from_reg (REGNO (entry_parm),
3015 validize_mem (stack_parm), nregs);
3016 entry_parm = stack_parm;
3017 }
3018 }
461beb10 3019#endif
6f086dfc
RS
3020
3021 /* If we didn't decide this parm came in a register,
3022 by default it came on the stack. */
3023 if (entry_parm == 0)
3024 entry_parm = stack_parm;
3025
3026 /* Record permanently how this parm was passed. */
3027 if (! second_time)
3028 DECL_INCOMING_RTL (parm) = entry_parm;
3029
3030 /* If there is actually space on the stack for this parm,
3031 count it in stack_args_size; otherwise set stack_parm to 0
3032 to indicate there is no preallocated stack slot for the parm. */
3033
3034 if (entry_parm == stack_parm
d9ca49d5 3035#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 3036 /* On some machines, even if a parm value arrives in a register
d9ca49d5
JW
3037 there is still an (uninitialized) stack slot allocated for it.
3038
3039 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3040 whether this parameter already has a stack slot allocated,
3041 because an arg block exists only if current_function_args_size
3042 is larger than some threshhold, and we haven't calculated that
3043 yet. So, for now, we just assume that stack slots never exist
3044 in this case. */
6f086dfc
RS
3045 || REG_PARM_STACK_SPACE (fndecl) > 0
3046#endif
3047 )
3048 {
3049 stack_args_size.constant += arg_size.constant;
3050 if (arg_size.var)
3051 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3052 }
3053 else
3054 /* No stack slot was pushed for this parm. */
3055 stack_parm = 0;
3056
3057 /* Update info on where next arg arrives in registers. */
3058
3059 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
3060 passed_type, ! last_named);
3061
3062 /* If this is our second time through, we are done with this parm. */
3063 if (second_time)
3064 continue;
3065
e16c591a
RS
3066 /* If we can't trust the parm stack slot to be aligned enough
3067 for its ultimate type, don't use that slot after entry.
3068 We'll make another stack slot, if we need one. */
3069 {
e16c591a
RS
3070 int thisparm_boundary
3071 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
e16c591a
RS
3072
3073 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3074 stack_parm = 0;
3075 }
3076
6f086dfc
RS
3077 /* Now adjust STACK_PARM to the mode and precise location
3078 where this parameter should live during execution,
3079 if we discover that it must live in the stack during execution.
3080 To make debuggers happier on big-endian machines, we store
3081 the value in the last bytes of the space available. */
3082
3083 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3084 && stack_parm != 0)
3085 {
3086 rtx offset_rtx;
3087
3088#if BYTES_BIG_ENDIAN
3089 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3090 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3091 - GET_MODE_SIZE (nominal_mode));
3092#endif
3093
3094 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3095 if (offset_rtx == const0_rtx)
3096 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3097 else
3098 stack_parm = gen_rtx (MEM, nominal_mode,
3099 gen_rtx (PLUS, Pmode,
3100 internal_arg_pointer, offset_rtx));
3101
3102 /* If this is a memory ref that contains aggregate components,
3103 mark it as such for cse and loop optimize. */
3104 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3105 }
3106
3107 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3108 in the mode in which it arrives.
3109 STACK_PARM is an RTX for a stack slot where the parameter can live
3110 during the function (in case we want to put it there).
3111 STACK_PARM is 0 if no stack slot was pushed for it.
3112
3113 Now output code if necessary to convert ENTRY_PARM to
3114 the type in which this function declares it,
3115 and store that result in an appropriate place,
3116 which may be a pseudo reg, may be STACK_PARM,
3117 or may be a local stack slot if STACK_PARM is 0.
3118
3119 Set DECL_RTL to that place. */
3120
3121 if (nominal_mode == BLKmode)
3122 {
3123 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3124 if (GET_CODE (entry_parm) == REG)
3125 {
3126 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3127 UNITS_PER_WORD);
3128
3129 /* Note that we will be storing an integral number of words.
3130 So we have to be careful to ensure that we allocate an
3131 integral number of words. We do this below in the
3132 assign_stack_local if space was not allocated in the argument
3133 list. If it was, this will not work if PARM_BOUNDARY is not
3134 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3135 if it becomes a problem. */
3136
3137 if (stack_parm == 0)
7e41ffa2
RS
3138 {
3139 stack_parm
3140 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
3141 /* If this is a memory ref that contains aggregate components,
3142 mark it as such for cse and loop optimize. */
3143 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3144 }
3145
6f086dfc
RS
3146 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3147 abort ();
3148
3149 move_block_from_reg (REGNO (entry_parm),
3150 validize_mem (stack_parm),
3151 size_stored / UNITS_PER_WORD);
3152 }
3153 DECL_RTL (parm) = stack_parm;
3154 }
74bd77a8 3155 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 3156 && ! DECL_INLINE (fndecl))
6f086dfc
RS
3157 /* layout_decl may set this. */
3158 || TREE_ADDRESSABLE (parm)
3159 || TREE_SIDE_EFFECTS (parm)
3160 /* If -ffloat-store specified, don't put explicit
3161 float variables into registers. */
3162 || (flag_float_store
3163 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3164 /* Always assign pseudo to structure return or item passed
3165 by invisible reference. */
3166 || passed_pointer || parm == function_result_decl)
3167 {
00d8a4c1
RK
3168 /* Store the parm in a pseudoregister during the function, but we
3169 may need to do it in a wider mode. */
3170
3171 register rtx parmreg;
3172
3173 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3174 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
3175 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
3176 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
3177 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
3178 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
3179 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
3180 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
3181 {
3182 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
3183 }
6f086dfc 3184
00d8a4c1 3185 parmreg = gen_reg_rtx (nominal_mode);
6f086dfc
RS
3186 REG_USERVAR_P (parmreg) = 1;
3187
3188 /* If this was an item that we received a pointer to, set DECL_RTL
3189 appropriately. */
3190 if (passed_pointer)
3191 {
3192 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3193 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3194 }
3195 else
3196 DECL_RTL (parm) = parmreg;
3197
3198 /* Copy the value into the register. */
3199 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
86f8eff3
RK
3200 {
3201 /* If ENTRY_PARM is a hard register, it might be in a register
3202 not valid for operating in its mode (e.g., an odd-numbered
3203 register for a DFmode). In that case, moves are the only
3204 thing valid, so we can't do a convert from there. This
3205 occurs when the calling sequence allow such misaligned
3412b298
JW
3206 usages.
3207
3208 In addition, the conversion may involve a call, which could
3209 clobber parameters which haven't been copied to pseudo
3210 registers yet. Therefore, we must first copy the parm to
3211 a pseudo reg here, and save the conversion until after all
3212 parameters have been moved. */
3213
3214 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3215
3216 emit_move_insn (tempreg, validize_mem (entry_parm));
3217
3218 push_to_sequence (conversion_insns);
8c394878 3219 convert_move (parmreg, tempreg, unsignedp);
3412b298
JW
3220 conversion_insns = get_insns ();
3221 end_sequence ();
86f8eff3 3222 }
6f086dfc
RS
3223 else
3224 emit_move_insn (parmreg, validize_mem (entry_parm));
3225
74bd77a8
RS
3226 /* If we were passed a pointer but the actual value
3227 can safely live in a register, put it in one. */
16bae307 3228 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
74bd77a8
RS
3229 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3230 && ! DECL_INLINE (fndecl))
3231 /* layout_decl may set this. */
3232 || TREE_ADDRESSABLE (parm)
3233 || TREE_SIDE_EFFECTS (parm)
3234 /* If -ffloat-store specified, don't put explicit
3235 float variables into registers. */
3236 || (flag_float_store
3237 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3238 {
2654605a
JW
3239 /* We can't use nominal_mode, because it will have been set to
3240 Pmode above. We must use the actual mode of the parm. */
3241 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
74bd77a8
RS
3242 emit_move_insn (parmreg, DECL_RTL (parm));
3243 DECL_RTL (parm) = parmreg;
3244 }
137a2a7b
DE
3245#ifdef FUNCTION_ARG_CALLEE_COPIES
3246 /* If we are passed an arg by reference and it is our responsibility
3247 to make a copy, do it now.
3248 PASSED_TYPE and PASSED mode now refer to the pointer, not the
3249 original argument, so we must recreate them in the call to
3250 FUNCTION_ARG_CALLEE_COPIES. */
3251 /* ??? Later add code to handle the case that if the argument isn't
3252 modified, don't do the copy. */
3253
3254 else if (passed_pointer
3255 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3256 TYPE_MODE (DECL_ARG_TYPE (parm)),
3257 DECL_ARG_TYPE (parm),
3258 ! last_named))
3259 {
3260 rtx copy;
3261 tree type = DECL_ARG_TYPE (parm);
3262
3263 /* This sequence may involve a library call perhaps clobbering
3264 registers that haven't been copied to pseudos yet. */
3265
3266 push_to_sequence (conversion_insns);
3267
3268 if (TYPE_SIZE (type) == 0
3269 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3270 {
3271 /* This is a variable sized object. */
3272 /* ??? Can we use expr_size here? */
3273 rtx size_rtx = expand_expr (size_in_bytes (type), NULL_RTX,
49640f91 3274 TYPE_MODE (sizetype), 0);
137a2a7b
DE
3275
3276 copy = gen_rtx (MEM, BLKmode,
3277 allocate_dynamic_stack_space (size_rtx, NULL_RTX,
3278 TYPE_ALIGN (type)));
3279 }
3280 else
3281 {
3282 int size = int_size_in_bytes (type);
0b2187a9 3283 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
137a2a7b
DE
3284 }
3285
3286 store_expr (parm, copy, 0);
3287 emit_move_insn (parmreg, XEXP (copy, 0));
3288 conversion_insns = get_insns ();
3289 end_sequence ();
3290 }
3291#endif /* FUNCTION_ARG_CALLEE_COPIES */
74bd77a8 3292
6f086dfc
RS
3293 /* In any case, record the parm's desired stack location
3294 in case we later discover it must live in the stack. */
3295 if (REGNO (parmreg) >= nparmregs)
3296 {
3297 rtx *new;
19fdd3ee 3298 int old_nparmregs = nparmregs;
6f086dfc
RS
3299 nparmregs = REGNO (parmreg) + 5;
3300 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
19fdd3ee
RS
3301 bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx));
3302 bzero (new + old_nparmregs, (nparmregs - old_nparmregs) * sizeof (rtx));
6f086dfc
RS
3303 parm_reg_stack_loc = new;
3304 }
3305 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3306
3307 /* Mark the register as eliminable if we did no conversion
3308 and it was copied from memory at a fixed offset,
3309 and the arg pointer was not copied to a pseudo-reg.
3310 If the arg pointer is a pseudo reg or the offset formed
3311 an invalid address, such memory-equivalences
3312 as we make here would screw up life analysis for it. */
3313 if (nominal_mode == passed_mode
3314 && GET_CODE (entry_parm) == MEM
e16c591a 3315 && entry_parm == stack_parm
6f086dfc
RS
3316 && stack_offset.var == 0
3317 && reg_mentioned_p (virtual_incoming_args_rtx,
3318 XEXP (entry_parm, 0)))
3319 REG_NOTES (get_last_insn ())
3320 = gen_rtx (EXPR_LIST, REG_EQUIV,
3321 entry_parm, REG_NOTES (get_last_insn ()));
3322
3323 /* For pointer data type, suggest pointer register. */
3324 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3325 mark_reg_pointer (parmreg);
3326 }
3327 else
3328 {
3329 /* Value must be stored in the stack slot STACK_PARM
3330 during function execution. */
3331
3332 if (passed_mode != nominal_mode)
86f8eff3
RK
3333 {
3334 /* Conversion is required. */
3412b298
JW
3335 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3336
3337 emit_move_insn (tempreg, validize_mem (entry_parm));
86f8eff3 3338
3412b298
JW
3339 push_to_sequence (conversion_insns);
3340 entry_parm = convert_to_mode (nominal_mode, tempreg,
a53e14c0 3341 TREE_UNSIGNED (TREE_TYPE (parm)));
3412b298
JW
3342 conversion_insns = get_insns ();
3343 end_sequence ();
86f8eff3 3344 }
6f086dfc
RS
3345
3346 if (entry_parm != stack_parm)
3347 {
3348 if (stack_parm == 0)
7e41ffa2
RS
3349 {
3350 stack_parm
3351 = assign_stack_local (GET_MODE (entry_parm),
3352 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3353 /* If this is a memory ref that contains aggregate components,
3354 mark it as such for cse and loop optimize. */
3355 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3356 }
3357
3412b298
JW
3358 if (passed_mode != nominal_mode)
3359 {
3360 push_to_sequence (conversion_insns);
3361 emit_move_insn (validize_mem (stack_parm),
3362 validize_mem (entry_parm));
3363 conversion_insns = get_insns ();
3364 end_sequence ();
3365 }
3366 else
3367 emit_move_insn (validize_mem (stack_parm),
3368 validize_mem (entry_parm));
6f086dfc
RS
3369 }
3370
3371 DECL_RTL (parm) = stack_parm;
3372 }
3373
3374 /* If this "parameter" was the place where we are receiving the
3375 function's incoming structure pointer, set up the result. */
3376 if (parm == function_result_decl)
3377 DECL_RTL (DECL_RESULT (fndecl))
3378 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3379
3380 if (TREE_THIS_VOLATILE (parm))
3381 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3382 if (TREE_READONLY (parm))
3383 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3384 }
3385
3412b298
JW
3386 /* Output all parameter conversion instructions (possibly including calls)
3387 now that all parameters have been copied out of hard registers. */
3388 emit_insns (conversion_insns);
3389
6f086dfc
RS
3390 max_parm_reg = max_reg_num ();
3391 last_parm_insn = get_last_insn ();
3392
3393 current_function_args_size = stack_args_size.constant;
3394
3395 /* Adjust function incoming argument size for alignment and
3396 minimum length. */
3397
3398#ifdef REG_PARM_STACK_SPACE
6f90e075 3399#ifndef MAYBE_REG_PARM_STACK_SPACE
6f086dfc
RS
3400 current_function_args_size = MAX (current_function_args_size,
3401 REG_PARM_STACK_SPACE (fndecl));
3402#endif
6f90e075 3403#endif
6f086dfc
RS
3404
3405#ifdef STACK_BOUNDARY
3406#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3407
3408 current_function_args_size
3409 = ((current_function_args_size + STACK_BYTES - 1)
3410 / STACK_BYTES) * STACK_BYTES;
3411#endif
3412
3413#ifdef ARGS_GROW_DOWNWARD
3414 current_function_arg_offset_rtx
5f4f0e22 3415 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
6f086dfc
RS
3416 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3417 size_int (-stack_args_size.constant)),
5f4f0e22 3418 NULL_RTX, VOIDmode, 0));
6f086dfc
RS
3419#else
3420 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3421#endif
3422
3423 /* See how many bytes, if any, of its args a function should try to pop
3424 on return. */
3425
3426 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3427 current_function_args_size);
3428
3429 /* For stdarg.h function, save info about regs and stack space
3430 used by the named args. */
3431
3432 if (stdarg)
3433 current_function_args_info = args_so_far;
3434
3435 /* Set the rtx used for the function return value. Put this in its
3436 own variable so any optimizers that need this information don't have
3437 to include tree.h. Do this here so it gets done when an inlined
3438 function gets output. */
3439
3440 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3441}
3442\f
75dc3319
RK
3443/* Indicate whether REGNO is an incoming argument to the current function
3444 that was promoted to a wider mode. If so, return the RTX for the
3445 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3446 that REGNO is promoted from and whether the promotion was signed or
3447 unsigned. */
3448
3449#ifdef PROMOTE_FUNCTION_ARGS
3450
3451rtx
3452promoted_input_arg (regno, pmode, punsignedp)
3453 int regno;
3454 enum machine_mode *pmode;
3455 int *punsignedp;
3456{
3457 tree arg;
3458
3459 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3460 arg = TREE_CHAIN (arg))
3461 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
3462 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3463 && (TREE_CODE (TREE_TYPE (arg)) == INTEGER_TYPE
3464 || TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE
3465 || TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE
3466 || TREE_CODE (TREE_TYPE (arg)) == CHAR_TYPE
3467 || TREE_CODE (TREE_TYPE (arg)) == REAL_TYPE
3468 || TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE
3469 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE))
3470 {
3471 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3472 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
3473
3474 PROMOTE_MODE (mode, unsignedp, TREE_TYPE (arg));
3475 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3476 && mode != DECL_MODE (arg))
3477 {
3478 *pmode = DECL_MODE (arg);
3479 *punsignedp = unsignedp;
3480 return DECL_INCOMING_RTL (arg);
3481 }
3482 }
3483
3484 return 0;
3485}
3486
3487#endif
3488\f
6f086dfc
RS
3489/* Compute the size and offset from the start of the stacked arguments for a
3490 parm passed in mode PASSED_MODE and with type TYPE.
3491
3492 INITIAL_OFFSET_PTR points to the current offset into the stacked
3493 arguments.
3494
3495 The starting offset and size for this parm are returned in *OFFSET_PTR
3496 and *ARG_SIZE_PTR, respectively.
3497
3498 IN_REGS is non-zero if the argument will be passed in registers. It will
3499 never be set if REG_PARM_STACK_SPACE is not defined.
3500
3501 FNDECL is the function in which the argument was defined.
3502
3503 There are two types of rounding that are done. The first, controlled by
3504 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3505 list to be aligned to the specific boundary (in bits). This rounding
3506 affects the initial and starting offsets, but not the argument size.
3507
3508 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3509 optionally rounds the size of the parm to PARM_BOUNDARY. The
3510 initial offset is not affected by this rounding, while the size always
3511 is and the starting offset may be. */
3512
3513/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
3514 initial_offset_ptr is positive because locate_and_pad_parm's
3515 callers pass in the total size of args so far as
3516 initial_offset_ptr. arg_size_ptr is always positive.*/
3517
3518static void pad_to_arg_alignment (), pad_below ();
3519
3520void
3521locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3522 initial_offset_ptr, offset_ptr, arg_size_ptr)
3523 enum machine_mode passed_mode;
3524 tree type;
3525 int in_regs;
3526 tree fndecl;
3527 struct args_size *initial_offset_ptr;
3528 struct args_size *offset_ptr;
3529 struct args_size *arg_size_ptr;
3530{
3531 tree sizetree
3532 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3533 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3534 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3535 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3536 int reg_parm_stack_space = 0;
3537
3538#ifdef REG_PARM_STACK_SPACE
3539 /* If we have found a stack parm before we reach the end of the
3540 area reserved for registers, skip that area. */
3541 if (! in_regs)
3542 {
29008b51
JW
3543#ifdef MAYBE_REG_PARM_STACK_SPACE
3544 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3545#else
6f086dfc 3546 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
29008b51 3547#endif
6f086dfc
RS
3548 if (reg_parm_stack_space > 0)
3549 {
3550 if (initial_offset_ptr->var)
3551 {
3552 initial_offset_ptr->var
3553 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3554 size_int (reg_parm_stack_space));
3555 initial_offset_ptr->constant = 0;
3556 }
3557 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3558 initial_offset_ptr->constant = reg_parm_stack_space;
3559 }
3560 }
3561#endif /* REG_PARM_STACK_SPACE */
3562
3563 arg_size_ptr->var = 0;
3564 arg_size_ptr->constant = 0;
3565
3566#ifdef ARGS_GROW_DOWNWARD
3567 if (initial_offset_ptr->var)
3568 {
3569 offset_ptr->constant = 0;
3570 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3571 initial_offset_ptr->var);
3572 }
3573 else
3574 {
3575 offset_ptr->constant = - initial_offset_ptr->constant;
3576 offset_ptr->var = 0;
3577 }
3578 if (where_pad == upward
3579 && (TREE_CODE (sizetree) != INTEGER_CST
3580 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3581 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3582 SUB_PARM_SIZE (*offset_ptr, sizetree);
66bcbe19
TG
3583 if (where_pad != downward)
3584 pad_to_arg_alignment (offset_ptr, boundary);
6f086dfc
RS
3585 if (initial_offset_ptr->var)
3586 {
3587 arg_size_ptr->var = size_binop (MINUS_EXPR,
3588 size_binop (MINUS_EXPR,
3589 integer_zero_node,
3590 initial_offset_ptr->var),
3591 offset_ptr->var);
3592 }
3593 else
3594 {
3595 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3596 offset_ptr->constant);
3597 }
3598/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3599 if (where_pad == downward)
3600 pad_below (arg_size_ptr, passed_mode, sizetree);
3601#else /* !ARGS_GROW_DOWNWARD */
3602 pad_to_arg_alignment (initial_offset_ptr, boundary);
3603 *offset_ptr = *initial_offset_ptr;
3604 if (where_pad == downward)
3605 pad_below (offset_ptr, passed_mode, sizetree);
3606
3607#ifdef PUSH_ROUNDING
3608 if (passed_mode != BLKmode)
3609 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3610#endif
3611
3612 if (where_pad != none
3613 && (TREE_CODE (sizetree) != INTEGER_CST
3614 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3615 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3616
3617 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3618#endif /* ARGS_GROW_DOWNWARD */
3619}
3620
e16c591a
RS
3621/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3622 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3623
6f086dfc
RS
3624static void
3625pad_to_arg_alignment (offset_ptr, boundary)
3626 struct args_size *offset_ptr;
3627 int boundary;
3628{
3629 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3630
3631 if (boundary > BITS_PER_UNIT)
3632 {
3633 if (offset_ptr->var)
3634 {
3635 offset_ptr->var =
3636#ifdef ARGS_GROW_DOWNWARD
3637 round_down
3638#else
3639 round_up
3640#endif
3641 (ARGS_SIZE_TREE (*offset_ptr),
3642 boundary / BITS_PER_UNIT);
3643 offset_ptr->constant = 0; /*?*/
3644 }
3645 else
3646 offset_ptr->constant =
3647#ifdef ARGS_GROW_DOWNWARD
3648 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3649#else
3650 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3651#endif
3652 }
3653}
3654
/* Adjust *OFFSET_PTR to account for padding below an argument of mode
   PASSED_MODE and size SIZETREE, so that the argument's data ends on a
   PARM_BOUNDARY boundary.  For non-BLKmode arguments the adjustment is a
   constant; for BLKmode arguments the rounded size (which may be symbolic)
   is added and the raw size subtracted.  */

static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      /* Pad by the difference between the mode's size rounded up to
	 PARM_BOUNDARY bits and its actual size in bytes.  */
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
3682
/* Return VALUE (a size expression) rounded down to a multiple of DIVISOR:
   (VALUE / DIVISOR) * DIVISOR, computed with size_binop tree arithmetic.
   Counterpart of round_up; used when arguments grow downward.  */

static tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  return size_binop (MULT_EXPR,
		     size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
		     size_int (divisor));
}
3692\f
3693/* Walk the tree of blocks describing the binding levels within a function
3694 and warn about uninitialized variables.
3695 This is done after calling flow_analysis and before global_alloc
3696 clobbers the pseudo-regs to hard regs. */
3697
/* Recursively walk BLOCK and its subblocks, warning about local variables
   that flow analysis found to be used uninitialized and about variables
   that may be clobbered by setjmp/longjmp.  */

void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != QUAL_UNION_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' may be used uninitialized in this function");
      /* Separately warn about register variables live across a setjmp.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' may be clobbered by `longjmp' or `vfork'");
    }
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
3729
3730/* Do the appropriate part of uninitialized_vars_warning
3731 but for arguments instead of local variables. */
3732
/* Warn about each parameter of the current function whose register may be
   clobbered at a setjmp.  BLOCK is accepted for symmetry with
   uninitialized_vars_warning but is not used.  */

void
setjmp_args_warning (block)
     tree block;
{
  register tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
      warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp' or `vfork'");
}
3745
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */
3748
/* Force every variable in BLOCK (and, recursively, its subblocks) that
   currently lives in a register into the stack, unless the user declared
   it `register' (and longjmp is known to restore registers).  Called when
   the current function calls setjmp.  */

void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variable because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
3777\f
3778/* Like the previous function, but for args instead of local variables. */
3779
/* Like the previous function, but for args instead of local variables:
   move each register-resident parameter of the current function into the
   stack, unless declared `register' (and longjmp restores registers).  */

void
setjmp_protect_args ()
{
  register tree decl, sub;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	&& (
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
#ifdef NON_SAVING_SETJMP
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! DECL_REGISTER (decl)))
      put_var_into_stack (decl);
}
3800\f
3801/* Return the context-pointer register corresponding to DECL,
3802 or 0 if it does not need one. */
3803
/* Return the context-pointer rtx for DECL, i.e. the base from which the
   containing function's frame can be addressed, or 0 if DECL has no
   function context.  Aborts if DECL's context is not on context_display.  */

rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  /* A decl with no enclosing function needs no static chain.  */
  if (context == 0)
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  /* Otherwise search the display of enclosing-function frame pointers.  */
  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  abort ();
}
3827\f
3828/* Convert a stack slot address ADDR for variable VAR
3829 (from a containing function)
3830 into an address valid in this function (using a static chain). */
3831
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).
   ADDR must be a REG or (PLUS reg const_int); anything else aborts.  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the outer function's compilation record.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      /* Lazily create the outer function's arg-pointer save slot.  */
      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      /* The slot's own address must itself be made valid here,
	 recursively, then dereferenced to fetch the saved ap.  */
      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
3913\f
3914/* Return the address of the trampoline for entering nested fn FUNCTION.
3915 If necessary, allocate a trampoline (in the stack frame)
3916 and emit rtl to initialize its contents (at entry to this function). */
3917
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).
   The actual initialization insns are emitted later by expand_function_end,
   which walks trampoline_list.  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
  /* Also search the trampolines of containing functions; those addresses
     must be translated into this function's frame of reference.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.
     fp stays 0 when FUNCTION is nested directly in the current function.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      /* Allocate the list node on the outer function's obstack so it
	 lives as long as that function's other data.  */
      push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
3994
3995/* Given a trampoline address,
3996 round it to multiple of TRAMPOLINE_ALIGNMENT. */
3997
/* Given a trampoline address TRAMP, emit insns computing it rounded up to
   a multiple of TRAMPOLINE_ALIGNMENT, i.e. (TRAMP + A-1) & -A, and return
   the rtx holding the result.  If TRAMPOLINE_ALIGNMENT is not defined,
   return TRAMP unchanged.
   NOTE(review): the mask trick assumes TRAMPOLINE_ALIGNMENT is a power of
   two -- presumably guaranteed by the target macros; confirm.  */

static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			GEN_INT (- TRAMPOLINE_ALIGNMENT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  return tramp;
}
4014\f
467456d0
RS
4015/* The functions identify_blocks and reorder_blocks provide a way to
4016 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4017 duplicate portions of the RTL code. Call identify_blocks before
4018 changing the RTL, and call reorder_blocks after. */
4019
4020static int all_blocks ();
4021static tree blocks_nreverse ();
4022
4023/* Put all this function's BLOCK nodes into a vector, and return it.
4024 Also store in each NOTE for the beginning or end of a block
4025 the index of that block in the vector.
4026 The arguments are TOP_BLOCK, the top-level block of the function,
4027 and INSNS, the insn chain of the function. */
4028
/* Put all this function's BLOCK nodes into a vector, and return it.
   Also store in each NOTE for the beginning or end of a block
   the index of that block in the vector.
   The arguments are TOP_BLOCK, the top-level block of the function,
   and INSNS, the insn chain of the function.
   Returns 0 if TOP_BLOCK is 0; otherwise the returned vector is
   xmalloc'd and the caller is responsible for freeing it.  */

tree *
identify_blocks (top_block, insns)
     tree top_block;
     rtx insns;
{
  int n_blocks;
  tree *block_vector;
  int *block_stack;		/* stack of enclosing block indices */
  int depth = 0;
  int next_block_number = 0;	/* index assigned to the next BLOCK_BEG seen */
  int current_block_number = 0;
  rtx insn;

  if (top_block == 0)
    return 0;

  /* First pass counts the blocks; second pass fills the vector.  */
  n_blocks = all_blocks (top_block, 0);
  block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
  block_stack = (int *) alloca (n_blocks * sizeof (int));

  all_blocks (top_block, block_vector);

  /* Number the notes to match vector slots, saving/restoring the current
     block index across nested blocks via block_stack.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_stack[depth++] = current_block_number;
	    current_block_number = next_block_number;
	    NOTE_BLOCK_NUMBER (insn) = next_block_number++;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    current_block_number = block_stack[--depth];
	    NOTE_BLOCK_NUMBER (insn) = current_block_number;
	  }
      }

  return block_vector;
}
4069
4070/* Given BLOCK_VECTOR which was returned by identify_blocks,
4071 and a revised instruction chain, rebuild the tree structure
4072 of BLOCK nodes to correspond to the new order of RTL.
fc289cd1 4073 The new block tree is inserted below TOP_BLOCK.
467456d0
RS
4074 Returns the current top-level block. */
4075
/* Given BLOCK_VECTOR which was returned by identify_blocks,
   and a revised instruction chain, rebuild the tree structure
   of BLOCK nodes to correspond to the new order of RTL.
   The new block tree is inserted below TOP_BLOCK.
   Returns the current top-level block.  */

tree
reorder_blocks (block_vector, top_block, insns)
     tree *block_vector;
     tree top_block;
     rtx insns;
{
  tree current_block = top_block;
  rtx insn;

  /* Without a vector there is nothing to reorder.  */
  if (block_vector == 0)
    return top_block;

  /* Prune the old tree away, so that it doesn't get in the way.  */
  BLOCK_SUBBLOCKS (current_block) = 0;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
	    /* If we have seen this block before, copy it.  */
	    if (TREE_ASM_WRITTEN (block))
	      block = copy_node (block);
	    BLOCK_SUBBLOCKS (block) = 0;
	    TREE_ASM_WRITTEN (block) = 1;
	    /* Splice the block in as a child of the current one.  */
	    BLOCK_SUPERCONTEXT (block) = current_block;
	    BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	    BLOCK_SUBBLOCKS (current_block) = block;
	    current_block = block;
	    /* Clearing the source file marks this note as processed.  */
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	  {
	    /* Children were pushed on in reverse; restore their order.  */
	    BLOCK_SUBBLOCKS (current_block)
	      = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	    current_block = BLOCK_SUPERCONTEXT (current_block);
	    NOTE_SOURCE_FILE (insn) = 0;
	  }
      }

  return current_block;
}
4119
4120/* Reverse the order of elements in the chain T of blocks,
4121 and return the new head of the chain (old last element). */
4122
4123static tree
4124blocks_nreverse (t)
4125 tree t;
4126{
4127 register tree prev = 0, decl, next;
4128 for (decl = t; decl; decl = next)
4129 {
4130 next = BLOCK_CHAIN (decl);
4131 BLOCK_CHAIN (decl) = prev;
4132 prev = decl;
4133 }
4134 return prev;
4135}
4136
4137/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
4138 Also clear TREE_ASM_WRITTEN in all blocks. */
4139
/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
   Also clear TREE_ASM_WRITTEN in all blocks.
   VECTOR may be 0, in which case blocks are only counted.
   Returns the number of blocks recorded, including BLOCK itself.  */

static int
all_blocks (block, vector)
     tree block;
     tree *vector;
{
  int n_blocks = 1;
  tree subblocks;

  TREE_ASM_WRITTEN (block) = 0;
  /* Record this block.  */
  if (vector)
    vector[0] = block;

  /* Record the subblocks, and their subblocks, depth-first; each child
     writes into the vector just past its predecessors' entries.  */
  for (subblocks = BLOCK_SUBBLOCKS (block);
       subblocks; subblocks = BLOCK_CHAIN (subblocks))
    n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);

  return n_blocks;
}
4160\f
6f086dfc
RS
4161/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4162 and initialize static variables for generating RTL for the statements
4163 of the function. */
4164
void
init_function_start (subr, filename, line)
     tree subr;		/* FUNCTION_DECL of the function about to be compiled.  */
     char *filename;	/* Source file and line where the function starts; */
     int line;		/*  used to emit the initial line note.  */
{
  char *junk;		/* Receives an auxiliary output of decl_printable_name;
			   unused here.  */

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon as it is laid out.  */
  immediate_size_expand++;

  /* We haven't made any trampolines for this function yet.  */
  trampoline_list = 0;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}
4309
4310/* Indicate that the current function uses extra args
4311 not explicitly mentioned in the argument list in any fashion. */
4312
void
mark_varargs ()
{
  /* Record the fact; the flag is cleared for each new function by
     init_function_start.  */
  current_function_varargs = 1;
}
4318
4319/* Expand a call to __main at the beginning of a possible main function. */
4320
void
expand_main_function ()
{
  /* When the target has an init section and does not ask for an explicit
     call (INVOKE__main), constructors run without __main, so no call is
     emitted.  */
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
		     VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
}
4329\f
4330/* Start the RTL for a new function, and set variables used for
4331 emitting RTL.
4332 SUBR is the FUNCTION_DECL node.
4333 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4334 the function's parameters, which must be run at any return statement. */
4335
4336void
4337expand_function_start (subr, parms_have_cleanups)
4338 tree subr;
4339 int parms_have_cleanups;
4340{
4341 register int i;
4342 tree tem;
4343 rtx last_ptr;
4344
4345 /* Make sure volatile mem refs aren't considered
4346 valid operands of arithmetic insns. */
4347 init_recog_no_volatile ();
4348
4349 /* If function gets a static chain arg, store it in the stack frame.
4350 Do this first, so it gets the first stack slot offset. */
4351 if (current_function_needs_context)
3e2481e9
JW
4352 {
4353 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4354 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4355 }
6f086dfc
RS
4356
4357 /* If the parameters of this function need cleaning up, get a label
4358 for the beginning of the code which executes those cleanups. This must
4359 be done before doing anything with return_label. */
4360 if (parms_have_cleanups)
4361 cleanup_label = gen_label_rtx ();
4362 else
4363 cleanup_label = 0;
4364
4365 /* Make the label for return statements to jump to, if this machine
4366 does not have a one-instruction return and uses an epilogue,
4367 or if it returns a structure, or if it has parm cleanups. */
4368#ifdef HAVE_return
4369 if (cleanup_label == 0 && HAVE_return
4370 && ! current_function_returns_pcc_struct
4371 && ! (current_function_returns_struct && ! optimize))
4372 return_label = 0;
4373 else
4374 return_label = gen_label_rtx ();
4375#else
4376 return_label = gen_label_rtx ();
4377#endif
4378
4379 /* Initialize rtx used to return the value. */
4380 /* Do this before assign_parms so that we copy the struct value address
4381 before any library calls that assign parms might generate. */
4382
4383 /* Decide whether to return the value in memory or in a register. */
4384 if (aggregate_value_p (DECL_RESULT (subr)))
4385 {
4386 /* Returning something that won't go in a register. */
4387 register rtx value_address;
4388
4389#ifdef PCC_STATIC_STRUCT_RETURN
4390 if (current_function_returns_pcc_struct)
4391 {
4392 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4393 value_address = assemble_static_space (size);
4394 }
4395 else
4396#endif
4397 {
4398 /* Expect to be passed the address of a place to store the value.
4399 If it is passed as an argument, assign_parms will take care of
4400 it. */
4401 if (struct_value_incoming_rtx)
4402 {
4403 value_address = gen_reg_rtx (Pmode);
4404 emit_move_insn (value_address, struct_value_incoming_rtx);
4405 }
4406 }
4407 if (value_address)
4408 DECL_RTL (DECL_RESULT (subr))
4409 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4410 value_address);
4411 }
4412 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4413 /* If return mode is void, this decl rtl should not be used. */
4414 DECL_RTL (DECL_RESULT (subr)) = 0;
4415 else if (parms_have_cleanups)
a53e14c0
RK
4416 {
4417 /* If function will end with cleanup code for parms,
4418 compute the return values into a pseudo reg,
4419 which we will copy into the true return register
4420 after the cleanups are done. */
4421
4422 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
4423#ifdef PROMOTE_FUNCTION_RETURN
4424 tree type = TREE_TYPE (DECL_RESULT (subr));
4425 int unsignedp = TREE_UNSIGNED (type);
4426
4427 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
4428 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
4429 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
4430 || TREE_CODE (type) == OFFSET_TYPE)
4431 {
4432 PROMOTE_MODE (mode, unsignedp, type);
4433 }
4434#endif
4435
4436 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
4437 }
6f086dfc
RS
4438 else
4439 /* Scalar, returned in a register. */
4440 {
4441#ifdef FUNCTION_OUTGOING_VALUE
4442 DECL_RTL (DECL_RESULT (subr))
4443 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4444#else
4445 DECL_RTL (DECL_RESULT (subr))
4446 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4447#endif
4448
4449 /* Mark this reg as the function's return value. */
4450 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4451 {
4452 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4453 /* Needed because we may need to move this to memory
4454 in case it's a named return value whose address is taken. */
a82ad570 4455 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6f086dfc
RS
4456 }
4457 }
4458
4459 /* Initialize rtx for parameters and local variables.
4460 In some cases this requires emitting insns. */
4461
4462 assign_parms (subr, 0);
4463
4464 /* The following was moved from init_function_start.
4465 The move is supposed to make sdb output more accurate. */
4466 /* Indicate the beginning of the function body,
4467 as opposed to parm setup. */
5f4f0e22 4468 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6f086dfc
RS
4469
4470 /* If doing stupid allocation, mark parms as born here. */
4471
4472 if (GET_CODE (get_last_insn ()) != NOTE)
5f4f0e22 4473 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6f086dfc
RS
4474 parm_birth_insn = get_last_insn ();
4475
4476 if (obey_regdecls)
4477 {
4478 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4479 use_variable (regno_reg_rtx[i]);
4480
4481 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4482 use_variable (current_function_internal_arg_pointer);
4483 }
4484
4485 /* Fetch static chain values for containing functions. */
4486 tem = decl_function_context (current_function_decl);
3e2481e9
JW
4487 /* If not doing stupid register allocation, then start off with the static
4488 chain pointer in a pseudo register. Otherwise, we use the stack
4489 address that was generated above. */
4490 if (tem && ! obey_regdecls)
6f086dfc
RS
4491 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4492 context_display = 0;
4493 while (tem)
4494 {
4495 tree rtlexp = make_node (RTL_EXPR);
4496
4497 RTL_EXPR_RTL (rtlexp) = last_ptr;
4498 context_display = tree_cons (tem, rtlexp, context_display);
4499 tem = decl_function_context (tem);
4500 if (tem == 0)
4501 break;
4502 /* Chain thru stack frames, assuming pointer to next lexical frame
4503 is found at the place we always store it. */
4504#ifdef FRAME_GROWS_DOWNWARD
4505 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4506#endif
4507 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4508 memory_address (Pmode, last_ptr)));
4509 }
4510
4511 /* After the display initializations is where the tail-recursion label
4512 should go, if we end up needing one. Ensure we have a NOTE here
4513 since some things (like trampolines) get placed before this. */
5f4f0e22 4514 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6f086dfc
RS
4515
4516 /* Evaluate now the sizes of any types declared among the arguments. */
4517 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4752d3bc 4518 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6f086dfc
RS
4519
4520 /* Make sure there is a line number after the function entry setup code. */
4521 force_next_line_note ();
4522}
4523\f
4524/* Generate RTL for the end of the current function.
4525 FILENAME and LINE are the current position in the source file. */
4526
4527/* It is up to language-specific callers to do cleanups for parameters. */
4528
4529void
4530expand_function_end (filename, line)
4531 char *filename;
4532 int line;
4533{
4534 register int i;
4535 tree link;
4536
4537 static rtx initial_trampoline;
4538
4539#ifdef NON_SAVING_SETJMP
4540 /* Don't put any variables in registers if we call setjmp
4541 on a machine that fails to restore the registers. */
4542 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4543 {
4544 setjmp_protect (DECL_INITIAL (current_function_decl));
4545 setjmp_protect_args ();
4546 }
4547#endif
4548
4549 /* Save the argument pointer if a save area was made for it. */
4550 if (arg_pointer_save_area)
4551 {
4552 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4553 emit_insn_before (x, tail_recursion_reentry);
4554 }
4555
4556 /* Initialize any trampolines required by this function. */
4557 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4558 {
4559 tree function = TREE_PURPOSE (link);
4560 rtx context = lookup_static_chain (function);
4561 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4562 rtx seq;
4563
4564 /* First make sure this compilation has a template for
4565 initializing trampolines. */
4566 if (initial_trampoline == 0)
86f8eff3
RK
4567 {
4568 end_temporary_allocation ();
4569 initial_trampoline
4570 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4571 resume_temporary_allocation ();
4572 }
6f086dfc
RS
4573
4574 /* Generate insns to initialize the trampoline. */
4575 start_sequence ();
4576 tramp = change_address (initial_trampoline, BLKmode,
4577 round_trampoline_addr (XEXP (tramp, 0)));
5f4f0e22 4578 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
6f086dfc
RS
4579 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4580 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4581 XEXP (DECL_RTL (function), 0), context);
4582 seq = get_insns ();
4583 end_sequence ();
4584
4585 /* Put those insns at entry to the containing function (this one). */
4586 emit_insns_before (seq, tail_recursion_reentry);
4587 }
6f086dfc
RS
4588
4589#if 0 /* I think unused parms are legitimate enough. */
4590 /* Warn about unused parms. */
4591 if (warn_unused)
4592 {
4593 rtx decl;
4594
4595 for (decl = DECL_ARGUMENTS (current_function_decl);
4596 decl; decl = TREE_CHAIN (decl))
4597 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4598 warning_with_decl (decl, "unused parameter `%s'");
4599 }
4600#endif
4601
4602 /* Delete handlers for nonlocal gotos if nothing uses them. */
4603 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4604 delete_handlers ();
4605
4606 /* End any sequences that failed to be closed due to syntax errors. */
4607 while (in_sequence_p ())
5f4f0e22 4608 end_sequence ();
6f086dfc
RS
4609
4610 /* Outside function body, can't compute type's actual size
4611 until next function's body starts. */
4612 immediate_size_expand--;
4613
4614 /* If doing stupid register allocation,
4615 mark register parms as dying here. */
4616
4617 if (obey_regdecls)
4618 {
4619 rtx tem;
4620 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4621 use_variable (regno_reg_rtx[i]);
4622
4623 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4624
4625 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4626 {
4627 use_variable (XEXP (tem, 0));
4628 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4629 }
4630
4631 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4632 use_variable (current_function_internal_arg_pointer);
4633 }
4634
4635 clear_pending_stack_adjust ();
4636 do_pending_stack_adjust ();
4637
4638 /* Mark the end of the function body.
4639 If control reaches this insn, the function can drop through
4640 without returning a value. */
5f4f0e22 4641 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6f086dfc
RS
4642
4643 /* Output a linenumber for the end of the function.
4644 SDB depends on this. */
4645 emit_line_note_force (filename, line);
4646
4647 /* Output the label for the actual return from the function,
4648 if one is expected. This happens either because a function epilogue
4649 is used instead of a return instruction, or because a return was done
4650 with a goto in order to run local cleanups, or because of pcc-style
4651 structure returning. */
4652
4653 if (return_label)
4654 emit_label (return_label);
4655
4656 /* If we had calls to alloca, and this machine needs
4657 an accurate stack pointer to exit the function,
4658 insert some code to save and restore the stack pointer. */
4659#ifdef EXIT_IGNORE_STACK
4660 if (! EXIT_IGNORE_STACK)
4661#endif
4662 if (current_function_calls_alloca)
4663 {
59257ff7
RK
4664 rtx tem = 0;
4665
4666 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5f4f0e22 4667 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6f086dfc
RS
4668 }
4669
4670 /* If scalar return value was computed in a pseudo-reg,
4671 copy that to the hard return register. */
4672 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4673 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4674 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4675 >= FIRST_PSEUDO_REGISTER))
4676 {
4677 rtx real_decl_result;
4678
4679#ifdef FUNCTION_OUTGOING_VALUE
4680 real_decl_result
4681 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4682 current_function_decl);
4683#else
4684 real_decl_result
4685 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4686 current_function_decl);
4687#endif
4688 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4689 emit_move_insn (real_decl_result,
4690 DECL_RTL (DECL_RESULT (current_function_decl)));
4691 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4692 }
4693
4694 /* If returning a structure, arrange to return the address of the value
4695 in a place where debuggers expect to find it.
4696
4697 If returning a structure PCC style,
4698 the caller also depends on this value.
4699 And current_function_returns_pcc_struct is not necessarily set. */
4700 if (current_function_returns_struct
4701 || current_function_returns_pcc_struct)
4702 {
4703 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4704 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4705#ifdef FUNCTION_OUTGOING_VALUE
4706 rtx outgoing
4707 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4708 current_function_decl);
4709#else
4710 rtx outgoing
4711 = FUNCTION_VALUE (build_pointer_type (type),
4712 current_function_decl);
4713#endif
4714
4715 /* Mark this as a function return value so integrate will delete the
4716 assignment and USE below when inlining this function. */
4717 REG_FUNCTION_VALUE_P (outgoing) = 1;
4718
4719 emit_move_insn (outgoing, value_address);
4720 use_variable (outgoing);
4721 }
4722
4723 /* Output a return insn if we are using one.
4724 Otherwise, let the rtl chain end here, to drop through
4725 into the epilogue. */
4726
4727#ifdef HAVE_return
4728 if (HAVE_return)
4729 {
4730 emit_jump_insn (gen_return ());
4731 emit_barrier ();
4732 }
4733#endif
4734
4735 /* Fix up any gotos that jumped out to the outermost
4736 binding level of the function.
4737 Must follow emitting RETURN_LABEL. */
4738
4739 /* If you have any cleanups to do at this point,
4740 and they need to create temporary variables,
4741 then you will lose. */
5f4f0e22 4742 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
6f086dfc 4743}
bdac5f58
TW
4744\f
4745/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4746
4747static int *prologue;
4748static int *epilogue;
4749
4750/* Create an array that records the INSN_UIDs of INSNS (either a sequence
4751 or a single insn). */
4752
4753static int *
4754record_insns (insns)
4755 rtx insns;
4756{
4757 int *vec;
4758
4759 if (GET_CODE (insns) == SEQUENCE)
4760 {
4761 int len = XVECLEN (insns, 0);
4762 vec = (int *) oballoc ((len + 1) * sizeof (int));
4763 vec[len] = 0;
4764 while (--len >= 0)
4765 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4766 }
4767 else
4768 {
4769 vec = (int *) oballoc (2 * sizeof (int));
4770 vec[0] = INSN_UID (insns);
4771 vec[1] = 0;
4772 }
4773 return vec;
4774}
4775
10914065 4776/* Determine how many INSN_UIDs in VEC are part of INSN. */
bdac5f58 4777
10914065 4778static int
bdac5f58
TW
4779contains (insn, vec)
4780 rtx insn;
4781 int *vec;
4782{
4783 register int i, j;
4784
4785 if (GET_CODE (insn) == INSN
4786 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4787 {
10914065 4788 int count = 0;
bdac5f58
TW
4789 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4790 for (j = 0; vec[j]; j++)
4791 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
10914065
TW
4792 count++;
4793 return count;
bdac5f58
TW
4794 }
4795 else
4796 {
4797 for (j = 0; vec[j]; j++)
4798 if (INSN_UID (insn) == vec[j])
10914065 4799 return 1;
bdac5f58
TW
4800 }
4801 return 0;
4802}
4803
4804/* Generate the prologe and epilogue RTL if the machine supports it. Thread
4805 this into place with notes indicating where the prologue ends and where
4806 the epilogue begins. Update the basic block information when possible. */
4807
4808void
4809thread_prologue_and_epilogue_insns (f)
4810 rtx f;
4811{
4812#ifdef HAVE_prologue
4813 if (HAVE_prologue)
4814 {
4815 rtx head, seq, insn;
4816
4817 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4818 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4819 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4820 seq = gen_prologue ();
4821 head = emit_insn_after (seq, f);
4822
4823 /* Include the new prologue insns in the first block. Ignore them
4824 if they form a basic block unto themselves. */
4825 if (basic_block_head && n_basic_blocks
4826 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4827 basic_block_head[0] = NEXT_INSN (f);
4828
4829 /* Retain a map of the prologue insns. */
4830 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4831 }
4832 else
4833#endif
4834 prologue = 0;
4835
4836#ifdef HAVE_epilogue
4837 if (HAVE_epilogue)
4838 {
4839 rtx insn = get_last_insn ();
4840 rtx prev = prev_nonnote_insn (insn);
4841
4842 /* If we end with a BARRIER, we don't need an epilogue. */
4843 if (! (prev && GET_CODE (prev) == BARRIER))
4844 {
4845 rtx tail, seq;
4846
4847 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4848 the epilogue insns (this must include the jump insn that
4849 returns), USE insns ad the end of a function, and a BARRIER. */
4850
4851 emit_barrier_after (insn);
4852
4853 /* Place the epilogue before the USE insns at the end of a
4854 function. */
4855 while (prev
4856 && GET_CODE (prev) == INSN
4857 && GET_CODE (PATTERN (prev)) == USE)
4858 {
4859 insn = PREV_INSN (prev);
4860 prev = prev_nonnote_insn (prev);
4861 }
4862
4863 seq = gen_epilogue ();
4864 tail = emit_jump_insn_after (seq, insn);
4865 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4866
4867 /* Include the new epilogue insns in the last block. Ignore
4868 them if they form a basic block unto themselves. */
4869 if (basic_block_end && n_basic_blocks
4870 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4871 basic_block_end[n_basic_blocks - 1] = tail;
4872
4873 /* Retain a map of the epilogue insns. */
4874 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4875 return;
4876 }
4877 }
4878#endif
4879 epilogue = 0;
4880}
4881
4882/* Reposition the prologue-end and epilogue-begin notes after instruction
4883 scheduling and delayed branch scheduling. */
4884
4885void
4886reposition_prologue_and_epilogue_notes (f)
4887 rtx f;
4888{
4889#if defined (HAVE_prologue) || defined (HAVE_epilogue)
4890 /* Reposition the prologue and epilogue notes. */
4891 if (n_basic_blocks)
4892 {
4893 rtx next, prev;
bf526252 4894 int len;
bdac5f58
TW
4895
4896 if (prologue)
4897 {
bf526252
RK
4898 register rtx insn, note = 0;
4899
4900 /* Scan from the beginning until we reach the last prologue insn.
4901 We apparently can't depend on basic_block_{head,end} after
4902 reorg has run. */
4903 for (len = 0; prologue[len]; len++)
4904 ;
9392c110
JH
4905 for (insn = f; len && insn; insn = NEXT_INSN (insn))
4906 {
4907 if (GET_CODE (insn) == NOTE)
4908 {
4909 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4910 note = insn;
4911 }
4912 else if ((len -= contains (insn, prologue)) == 0)
4913 {
4914 /* Find the prologue-end note if we haven't already, and
4915 move it to just after the last prologue insn. */
4916 if (note == 0)
4917 {
4918 for (note = insn; note = NEXT_INSN (note);)
4919 if (GET_CODE (note) == NOTE
4920 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4921 break;
4922 }
4923 next = NEXT_INSN (note);
4924 prev = PREV_INSN (note);
4925 if (prev)
4926 NEXT_INSN (prev) = next;
4927 if (next)
4928 PREV_INSN (next) = prev;
4929 add_insn_after (note, insn);
4930 }
4931 }
bdac5f58
TW
4932 }
4933
4934 if (epilogue)
4935 {
bf526252
RK
4936 register rtx insn, note = 0;
4937
4938 /* Scan from the end until we reach the first epilogue insn.
4939 We apparently can't depend on basic_block_{head,end} after
4940 reorg has run. */
4941 for (len = 0; epilogue[len]; len++)
4942 ;
9392c110
JH
4943 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
4944 {
4945 if (GET_CODE (insn) == NOTE)
4946 {
4947 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4948 note = insn;
4949 }
4950 else if ((len -= contains (insn, epilogue)) == 0)
4951 {
4952 /* Find the epilogue-begin note if we haven't already, and
4953 move it to just before the first epilogue insn. */
4954 if (note == 0)
4955 {
4956 for (note = insn; note = PREV_INSN (note);)
4957 if (GET_CODE (note) == NOTE
4958 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4959 break;
4960 }
4961 next = NEXT_INSN (note);
4962 prev = PREV_INSN (note);
4963 if (prev)
4964 NEXT_INSN (prev) = next;
4965 if (next)
4966 PREV_INSN (next) = prev;
4967 add_insn_after (note, PREV_INSN (insn));
4968 }
4969 }
bdac5f58
TW
4970 }
4971 }
4972#endif /* HAVE_prologue or HAVE_epilogue */
4973}
This page took 0.618366 seconds and 5 git commands to generate.