/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
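
/* For illustration, with ALIGN == 8:
   FLOOR_ROUND (-13, 8) == (-13 & ~7) == -16, and
   CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16.
   The bitwise forms agree with division-based rounding for positive
   values but stay well defined when VALUE is negative.  */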

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static void instantiate_decl ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
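
#if 0
/* Illustrative sketch only, not part of the original file: the typical
   calling pattern for the temporary-slot machinery above.  A statement
   needing scratch memory brackets its code with a push/pop of the
   nesting level; slots freed here become reusable by later statements.
   (temp_slot_usage_sketch is a hypothetical name.)  */
static void
temp_slot_usage_sketch ()
{
  rtx scratch;

  push_temp_slots ();
  /* 16 bytes of BLKmode scratch; KEEP == 0, so free_temp_slots may
     reclaim the slot at the end of the statement.  */
  scratch = assign_stack_temp (BLKmode, 16, 0);
  /* ... emit insns that use SCRATCH ... */
  free_temp_slots ();
  pop_temp_slots ();
}
#endif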
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;

  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
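
#if 0
/* Illustrative sketch only, not in the original file: typical requests.
   ALIGN == 0 lets the mode dictate alignment; BLKmode data of unknown
   alignment passes -1 to get BIGGEST_ALIGNMENT and a rounded size.
   (assign_stack_local_usage_sketch is a hypothetical name.)  */
static void
assign_stack_local_usage_sketch ()
{
  /* A word-sized slot aligned as SImode requires.  */
  rtx word_slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* 24 bytes of maximally aligned BLKmode storage, size rounded up.  */
  rtx blk_slot = assign_stack_local (BLKmode, 24, -1);
}
#endif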

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round that function's frame offset to the alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
\f
/* If X could be a reference to a temporary slot, mark that slot as belonging
   to the level one higher.  If X matched one of our slots, just mark that
   one.  Otherwise, we can't easily predict which it is, so upgrade all of
   them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)
      {
        p->level--;
        return;
      }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
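
/* For example, in a GNU C statement expression such as

       x = ({ struct s tmp; tmp = f (); tmp; });

   the value of the grouping may live in a temporary slot allocated while
   expanding the inner statements; this function is what keeps such a
   slot from being freed at the end of those statements.  */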

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->in_use = 0;
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  /* For GNU C++, we must allow a sequence to be emitted anywhere in
     the level where the sequence was started.  By not changing levels
     when the compiler is inside a sequence, the temporaries for the
     sequence will not unwittingly conflict with the temporaries for
     other sequences and/or code at that level.  */
  if (in_sequence_p ())
    return;

  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  /* See comment in push_temp_slots about why we don't change levels
     in sequences.  */
  if (in_sequence_p ())
    return;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level)
      p->in_use = 0;

  temp_slot_level--;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
  promoted_mode = GET_MODE (reg);

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (GET_MODE (reg),
                                        GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (GET_MODE (reg),
                                  GET_MODE_SIZE (decl_mode), 0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);
      temp
        = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl));
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl)));
}
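
/* Note the trick above: REG is transmuted in place with PUT_CODE/PUT_MODE
   rather than replaced, so every insn sharing that rtx sees the MEM at
   once; fixup_var_refs then only repairs insns the new MEM makes invalid,
   e.g. a parm load (set (reg) (reg)) that has just become a no-op
   memory-to-memory move and can simply be deleted.  */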
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
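
/* For example (illustrative only), an insn whose pattern mentions VAR
   twice under a MATCH_DUP constraint, say

       (set (mem:SI (reg A)) (plus:SI (mem:SI (reg A)) (const_int 1))),

   must have both occurrences rewritten to the *same* new rtx; looking
   each one up in this list is what guarantees that.  */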

/* REPLACEMENTS is a pointer to a list of the above structures and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
          || GET_CODE (insn) == JUMP_INSN)
        {
          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          if (toplevel
              && GET_CODE (PATTERN (insn)) == SET
              && SET_DEST (PATTERN (insn)) == var
              && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              next = delete_insn (insn);
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              struct fixup_replacement *replacements = 0;

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      /* We can not separate USE insns from the CALL_INSN
                         that they belong to.  If this is a CALL_INSN, insert
                         the move insn before the USE insns preceding it
                         instead of immediately before the insn.  */
                      if (GET_CODE (insn) == CALL_INSN)
                        {
                          insert_before = insn;
                          while (GET_CODE (PREV_INSN (insert_before)) == INSN
                                 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
                            insert_before = PREV_INSN (insert_before);
                        }
                      else
                        insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register or we changed the mode,
             we can leave things the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && GET_MODE (x) == promoted_mode
              && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (promoted_mode);
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            tem = fixup_memory_subreg (tem, insn, 1);
          tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  int offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                  offset = (GET_MODE_SIZE (is_mode)
                            - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                  pos %= GET_MODE_BITSIZE (wanted_mode);

                  newmem = gen_rtx (MEM, wanted_mode,
                                    plus_constant (XEXP (tem, 0), offset));
                  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                  /* Make the change and see if the insn remains valid.  */
                  INSN_CODE (insn) = -1;
                  XEXP (x, 0) = newmem;
                  XEXP (x, 2) = GEN_INT (pos);

                  if (recog_memoized (insn) >= 0)
                    return;

                  /* Otherwise, restore old position.  XEXP (x, 0) will be
                     restored later.  */
                  XEXP (x, 2) = old_pos;
                }
            }

          /* If we get here, the bitfield extract insn can't accept a memory
             reference.  Copy the input into a register.  */

          tem1 = gen_reg_rtx (GET_MODE (tem));
          emit_insn_before (gen_move_insn (tem1, tem), insn);
          XEXP (x, 0) = tem1;
          return;
        }
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
        {
          /* If this is a special SUBREG made because VAR was promoted
             from a wider mode, replace it with VAR and call ourself
             recursively, this time saying that the object previously
             had its current mode (by virtue of the SUBREG).  */

          if (SUBREG_PROMOTED_VAR_P (x))
            {
              *loc = var;
              fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
              return;
            }

          /* If this SUBREG makes VAR wider, it has become a paradoxical
             SUBREG with VAR in memory, but these aren't allowed at this
             stage of the compilation.  So load VAR into a pseudo and take
             a SUBREG of that pseudo.  */
          if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
            {
              replacement = find_replacement (replacements, var);
              if (replacement->new == 0)
                replacement->new = gen_reg_rtx (GET_MODE (var));
              SUBREG_REG (x) = replacement->new;
              return;
            }

          /* See if we have already found a replacement for this SUBREG.
             If so, use it.  Otherwise, make a MEM and see if the insn
             is recognized.  If not, or if we should force MEM into a register,
             make a pseudo for this SUBREG.  */
          replacement = find_replacement (replacements, x);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
          || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
          || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
         insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
          && SUBREG_REG (SET_DEST (x)) == var
          && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
              > GET_MODE_SIZE (GET_MODE (var))))
        {
          SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
          emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
                                                            tem)),
                           insn);
          break;
        }

      {
        rtx dest = SET_DEST (x);
        rtx src = SET_SRC (x);
        rtx outerdest = dest;

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == SIGN_EXTRACT
               || GET_CODE (dest) == ZERO_EXTRACT)
          dest = XEXP (dest, 0);

        if (GET_CODE (src) == SUBREG)
          src = XEXP (src, 0);

        /* If VAR does not appear at the top level of the SET
           just scan the lower levels of the tree.  */

        if (src != var && dest != var)
          break;

        /* We will need to rerecognize this insn.  */
        INSN_CODE (insn) = -1;

#ifdef HAVE_insv
        if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
          {
            /* Since this case will return, ensure we fixup all the
               operands here.  */
            fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
                              insn, replacements);
            fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
                              insn, replacements);
            fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
                              insn, replacements);

            tem = XEXP (outerdest, 0);

            /* Clean up (SUBREG:SI (MEM:mode ...) 0)
               that may appear inside a ZERO_EXTRACT.
               This was legitimate when the MEM was a REG.  */
            if (GET_CODE (tem) == SUBREG
                && SUBREG_REG (tem) == var)
              tem = fixup_memory_subreg (tem, insn, 1);
            else
              tem = fixup_stack_1 (tem, insn);

            if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
                && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
                && ! mode_dependent_address_p (XEXP (tem, 0))
                && ! MEM_VOLATILE_P (tem))
              {
                enum machine_mode wanted_mode
                  = insn_operand_mode[(int) CODE_FOR_insv][0];
                enum machine_mode is_mode = GET_MODE (tem);
                int width = INTVAL (XEXP (outerdest, 1));
                int pos = INTVAL (XEXP (outerdest, 2));

                /* If we have a narrower mode, we can do something.  */
                if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                  {
                    int offset = pos / BITS_PER_UNIT;
                    rtx old_pos = XEXP (outerdest, 2);
                    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                    offset = (GET_MODE_SIZE (is_mode)
                              - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                    pos %= GET_MODE_BITSIZE (wanted_mode);

                    newmem = gen_rtx (MEM, wanted_mode,
                                      plus_constant (XEXP (tem, 0), offset));
                    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                    /* Make the change and see if the insn remains valid.  */
                    INSN_CODE (insn) = -1;
                    XEXP (outerdest, 0) = newmem;
                    XEXP (outerdest, 2) = GEN_INT (pos);

                    if (recog_memoized (insn) >= 0)
                      return;

                    /* Otherwise, restore old position.  XEXP (x, 0) will be
                       restored later.  */
                    XEXP (outerdest, 2) = old_pos;
                  }
              }

            /* If we get here, the bit-field store doesn't allow memory
               or isn't located at a constant position.  Load the value into
               a register, do the store, and put it back into memory.  */

            tem1 = gen_reg_rtx (GET_MODE (tem));
            emit_insn_before (gen_move_insn (tem1, tem), insn);
            emit_insn_after (gen_move_insn (tem, tem1), insn);
            XEXP (outerdest, 0) = tem1;
            return;
          }
#endif

        /* STRICT_LOW_PART is a no-op on memory references
           and it can cause combinations to be unrecognizable,
           so eliminate it.  */

        if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
          SET_DEST (x) = XEXP (SET_DEST (x), 0);

        /* A valid insn to copy VAR into or out of a register
           must be left alone, to avoid an infinite loop here.
           If the reference to VAR is by a subreg, fix that up,
           since SUBREG is not valid for a memref.
           Also fix up the address of the stack slot.  */

        if ((SET_SRC (x) == var
             || (GET_CODE (SET_SRC (x)) == SUBREG
                 && SUBREG_REG (SET_SRC (x)) == var))
            && (GET_CODE (SET_DEST (x)) == REG
                || (GET_CODE (SET_DEST (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            replacement = find_replacement (replacements, SET_SRC (x));
            if (replacement->new)
              {
                SET_SRC (x) = replacement->new;
                return;
              }
            else if (GET_CODE (SET_SRC (x)) == SUBREG)
              SET_SRC (x) = replacement->new
                = fixup_memory_subreg (SET_SRC (x), insn, 0);
            else
              SET_SRC (x) = replacement->new
                = fixup_stack_1 (SET_SRC (x), insn);
            return;
          }

        if ((SET_DEST (x) == var
             || (GET_CODE (SET_DEST (x)) == SUBREG
                 && SUBREG_REG (SET_DEST (x)) == var))
            && (GET_CODE (SET_SRC (x)) == REG
                || (GET_CODE (SET_SRC (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            if (GET_CODE (SET_DEST (x)) == SUBREG)
              SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
            else
              SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
            return;
          }

        /* Otherwise, storing into VAR must be handled specially
           by storing into a temporary and copying that into VAR
           with a new insn after this one.  Note that this case
           will be used when storing into a promoted scalar since
           the insn will now have different modes on the input
           and output and hence will be invalid (except for the case
           of setting it to a constant, which does not need any
           change if it is valid).  We generate extra code in that case,
           but combine.c will eliminate it.  */

        if (dest == var)
          {
            rtx temp;
            rtx fixeddest = SET_DEST (x);

            /* STRICT_LOW_PART can be discarded, around a MEM.  */
            if (GET_CODE (fixeddest) == STRICT_LOW_PART)
              fixeddest = XEXP (fixeddest, 0);
            /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
            if (GET_CODE (fixeddest) == SUBREG)
              fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
            else
              fixeddest = fixup_stack_1 (fixeddest, insn);

            temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
                                ? GET_MODE (fixeddest)
                                : GET_MODE (SET_SRC (x)));

            emit_insn_after (gen_move_insn (fixeddest,
                                            gen_lowpart (GET_MODE (fixeddest),
                                                         temp)),
                             insn);

            SET_DEST (x) = temp;
          }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
                              insn, replacements);
        }
    }
}
\f
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

#if BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
             - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
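
/* For example (illustrative only), on a little-endian target with 4-byte
   words, (SUBREG:SI (MEM:DI addr) 1) names the second word of the DImode
   memory, so the result is (MEM:SI (plus addr 4)): OFFSET is SUBREG_WORD
   (here 1) times UNITS_PER_WORD (here 4), with no big-endian
   correction.  */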

/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
walk_fixup_memory_subreg (x, insn)
     register rtx x;
     rtx insn;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, 0);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j)
              = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
            || GET_CODE (insn) == JUMP_INSN)
          fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif

/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
         (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
          && GET_CODE (XEXP (ad, 0)) == REG
          && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
          && GET_CODE (XEXP (ad, 1)) == CONST_INT)
        {
          rtx temp, seq;
          if (memory_address_p (GET_MODE (x), ad))
            return x;

          start_sequence ();
          temp = copy_to_reg (ad);
          seq = gen_sequence ();
          end_sequence ();
          emit_insn_before (seq, insn);
          return change_address (x, VOIDmode, temp);
        }
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
\f
1699/* Optimization: a bit-field instruction whose field
1700 happens to be a byte or halfword in memory
1701 can be changed to a move instruction.
1702
1703 We call here when INSN is an insn to examine or store into a bit-field.
1704 BODY is the SET-rtx to be altered.
1705
1706 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1707 (Currently this is called only from function.c, and EQUIV_MEM
1708 is always 0.) */
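/* A hypothetical instance of the transformation: with 8-bit storage
   units, a store into an aligned 8-bit field such as

	(set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
	     (reg:SI src))

   can become the plain move

	(set (mem:QI (plus addr (const_int 1))) (subreg:QI (reg:SI src) 0))

   The byte offset chosen depends on BITS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN, as computed below.  */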
1709
1710static void
1711optimize_bit_field (body, insn, equiv_mem)
1712 rtx body;
1713 rtx insn;
1714 rtx *equiv_mem;
1715{
1716 register rtx bitfield;
1717 int destflag;
1718 rtx seq = 0;
1719 enum machine_mode mode;
1720
1721 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1722 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1723 bitfield = SET_DEST (body), destflag = 1;
1724 else
1725 bitfield = SET_SRC (body), destflag = 0;
1726
1727 /* First check that the field being stored has constant size and position
1728 and is in fact a byte or halfword suitably aligned. */
1729
1730 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1731 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1732 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1733 != BLKmode)
1734 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1735 {
1736 register rtx memref = 0;
1737
1738 /* Now check that the containing word is memory, not a register,
1739 and that it is safe to change the machine mode. */
1740
1741 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1742 memref = XEXP (bitfield, 0);
1743 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1744 && equiv_mem != 0)
1745 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1746 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1747 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1748 memref = SUBREG_REG (XEXP (bitfield, 0));
1749 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1750 && equiv_mem != 0
1751 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1752 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1753
1754 if (memref
1755 && ! mode_dependent_address_p (XEXP (memref, 0))
1756 && ! MEM_VOLATILE_P (memref))
1757 {
1758 /* Now adjust the address, first for any subreg'ing
1759 that we are now getting rid of,
1760 and then for which byte of the word is wanted. */
1761
1762 register int offset = INTVAL (XEXP (bitfield, 2));
1763 /* Adjust OFFSET to count bits from low-address byte. */
1764#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1765 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1766 - offset - INTVAL (XEXP (bitfield, 1)));
1767#endif
1768 /* Adjust OFFSET to count bytes from low-address byte. */
1769 offset /= BITS_PER_UNIT;
1770 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1771 {
1772 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1773#if BYTES_BIG_ENDIAN
1774 offset -= (MIN (UNITS_PER_WORD,
1775 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1776 - MIN (UNITS_PER_WORD,
1777 GET_MODE_SIZE (GET_MODE (memref))));
1778#endif
1779 }
1780
1781 memref = change_address (memref, mode,
1782 plus_constant (XEXP (memref, 0), offset));
1783
1784 /* Store this memory reference where
1785 we found the bit field reference. */
1786
1787 if (destflag)
1788 {
1789 validate_change (insn, &SET_DEST (body), memref, 1);
1790 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1791 {
1792 rtx src = SET_SRC (body);
1793 while (GET_CODE (src) == SUBREG
1794 && SUBREG_WORD (src) == 0)
1795 src = SUBREG_REG (src);
1796 if (GET_MODE (src) != GET_MODE (memref))
1797 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1798 validate_change (insn, &SET_SRC (body), src, 1);
1799 }
1800 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1801 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1802 /* This shouldn't happen because anything that didn't have
1803 one of these modes should have got converted explicitly
1804 and then referenced through a subreg.
1805 This is so because the original bit-field was
1806 handled by agg_mode and so its tree structure had
1807 the same mode that memref now has. */
1808 abort ();
1809 }
1810 else
1811 {
1812 rtx dest = SET_DEST (body);
1813
1814 while (GET_CODE (dest) == SUBREG
1815 && SUBREG_WORD (dest) == 0)
1816 dest = SUBREG_REG (dest);
1817
1818 validate_change (insn, &SET_DEST (body), dest, 1);
1819
1820 if (GET_MODE (dest) == GET_MODE (memref))
1821 validate_change (insn, &SET_SRC (body), memref, 1);
1822 else
1823 {
1824 /* Convert the mem ref to the destination mode. */
1825 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1826
1827 start_sequence ();
1828 convert_move (newreg, memref,
1829 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1830 seq = get_insns ();
1831 end_sequence ();
1832
1833 validate_change (insn, &SET_SRC (body), newreg, 1);
1834 }
1835 }
1836
1837 /* See if we can convert this extraction or insertion into
1838 a simple move insn. We might not be able to do so if this
1839 was, for example, part of a PARALLEL.
1840
1841 If we succeed, write out any needed conversions. If we fail,
1842 it is hard to guess why we failed, so don't do anything
1843 special; just let the optimization be suppressed. */
1844
1845 if (apply_change_group () && seq)
1846 emit_insns_before (seq, insn);
1847 }
1848 }
1849}
1850\f
1851/* These routines are responsible for converting virtual register references
1852 to the actual hard register references once RTL generation is complete.
1853
1854 The following four variables are used for communication between the
1855 routines. They contain the offsets of the virtual registers from their
1856 respective hard registers. */
1857
1858static int in_arg_offset;
1859static int var_offset;
1860static int dynamic_offset;
1861static int out_arg_offset;
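/* Illustrative sketch (values are hypothetical): once these offsets
   are known, an address such as

	(plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   is rewritten as

	(plus:SI (reg:SI frame-pointer) (const_int 8 + var_offset))

   and likewise for the other three virtual registers with
   in_arg_offset, dynamic_offset and out_arg_offset.  */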
1862
1863/* In most machines, the stack pointer register is equivalent to the bottom
1864 of the stack. */
1865
1866#ifndef STACK_POINTER_OFFSET
1867#define STACK_POINTER_OFFSET 0
1868#endif
1869
1870/* If not defined, pick an appropriate default for the offset of dynamically
1871 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1872 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1873
1874#ifndef STACK_DYNAMIC_OFFSET
1875
1876#ifdef ACCUMULATE_OUTGOING_ARGS
1877/* The bottom of the stack points to the actual arguments. If
1878 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1879 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1880 stack space for register parameters is not pushed by the caller, but
1881 rather part of the fixed stack areas and hence not included in
1882 `current_function_outgoing_args_size'. Nevertheless, we must allow
1883 for it when allocating stack dynamic objects. */
1884
1885#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1886#define STACK_DYNAMIC_OFFSET(FNDECL) \
1887(current_function_outgoing_args_size \
1888 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1889
1890#else
1891#define STACK_DYNAMIC_OFFSET(FNDECL) \
1892(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1893#endif
1894
1895#else
1896#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1897#endif
1898#endif
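/* Worked example with hypothetical values: if ACCUMULATE_OUTGOING_ARGS
   and REG_PARM_STACK_SPACE are defined, REG_PARM_STACK_SPACE (FNDECL)
   is 16, STACK_POINTER_OFFSET is 0 and
   current_function_outgoing_args_size is 24, the first definition
   above yields STACK_DYNAMIC_OFFSET (FNDECL) == 24 + 16 + 0 == 40,
   so dynamic allocations begin 40 bytes from the stack pointer.  */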
1899
1900/* Pass through the INSNS of function FNDECL and convert virtual register
1901 references to hard register references. */
1902
1903void
1904instantiate_virtual_regs (fndecl, insns)
1905 tree fndecl;
1906 rtx insns;
1907{
1908 rtx insn;
1909
1910 /* Compute the offsets to use for this function. */
1911 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1912 var_offset = STARTING_FRAME_OFFSET;
1913 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1914 out_arg_offset = STACK_POINTER_OFFSET;
1915
1916 /* Scan all variables and parameters of this function. For each that is
1917 in memory, instantiate all virtual registers if the result is a valid
1918 address. If not, we do it later. That will handle most uses of virtual
1919 regs on many machines. */
1920 instantiate_decls (fndecl, 1);
1921
1922 /* Initialize recognition, indicating that volatile is OK. */
1923 init_recog ();
1924
1925 /* Scan through all the insns, instantiating every virtual register still
1926 present. */
1927 for (insn = insns; insn; insn = NEXT_INSN (insn))
1928 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1929 || GET_CODE (insn) == CALL_INSN)
1930 {
1931 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
5f4f0e22 1932 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1933 }
1934
1935 /* Now instantiate the remaining register equivalences for debugging info.
1936 These will not be valid addresses. */
1937 instantiate_decls (fndecl, 0);
1938
1939 /* Indicate that, from now on, assign_stack_local should use
1940 frame_pointer_rtx. */
1941 virtuals_instantiated = 1;
1942}
1943
1944/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1945 all virtual registers in their DECL_RTL's.
1946
1947 If VALID_ONLY, do this only if the resulting address is still valid.
1948 Otherwise, always do it. */
1949
1950static void
1951instantiate_decls (fndecl, valid_only)
1952 tree fndecl;
1953 int valid_only;
1954{
1955 tree decl;
1956
a82ad570 1957 if (DECL_INLINE (fndecl))
1958 /* When compiling an inline function, the obstack used for
1959 rtl allocation is the maybepermanent_obstack. Calling
1960 `resume_temporary_allocation' switches us back to that
1961 obstack while we process this function's parameters. */
1962 resume_temporary_allocation ();
1963
1964 /* Process all parameters of the function. */
1965 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1966 {
1967 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
1968 valid_only);
1969 instantiate_decl (DECL_INCOMING_RTL (decl),
1970 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
1971 }
1972
1973 /* Now process all variables defined in the function or its subblocks. */
1974 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1975
a82ad570 1976 if (DECL_INLINE (fndecl))
1977 {
1978 /* Save all rtl allocated for this function by raising the
1979 high-water mark on the maybepermanent_obstack. */
1980 preserve_data ();
1981 /* All further rtl allocation is now done in the current_obstack. */
1982 rtl_in_current_obstack ();
1983 }
1984}
1985
1986/* Subroutine of instantiate_decls: Process all decls in the given
1987 BLOCK node and all its subblocks. */
1988
1989static void
1990instantiate_decls_1 (let, valid_only)
1991 tree let;
1992 int valid_only;
1993{
1994 tree t;
1995
1996 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1997 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
1998 valid_only);
1999
2000 /* Process all subblocks. */
2001 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2002 instantiate_decls_1 (t, valid_only);
2003}
2004
 2005/* Subroutine of the preceding procedures: Given RTL representing a
2006 decl and the size of the object, do any instantiation required.
2007
2008 If VALID_ONLY is non-zero, it means that the RTL should only be
2009 changed if the new address is valid. */
2010
2011static void
2012instantiate_decl (x, size, valid_only)
2013 rtx x;
2014 int size;
2015 int valid_only;
2016{
2017 enum machine_mode mode;
2018 rtx addr;
2019
2020 /* If this is not a MEM, no need to do anything. Similarly if the
2021 address is a constant or a register that is not a virtual register. */
2022
2023 if (x == 0 || GET_CODE (x) != MEM)
2024 return;
2025
2026 addr = XEXP (x, 0);
2027 if (CONSTANT_P (addr)
2028 || (GET_CODE (addr) == REG
2029 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2030 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2031 return;
2032
2033 /* If we should only do this if the address is valid, copy the address.
2034 We need to do this so we can undo any changes that might make the
2035 address invalid. This copy is unfortunate, but probably can't be
2036 avoided. */
2037
2038 if (valid_only)
2039 addr = copy_rtx (addr);
2040
2041 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2042
2043 if (! valid_only)
2044 return;
2045
2046 /* Now verify that the resulting address is valid for every integer or
2047 floating-point mode up to and including SIZE bytes long. We do this
2048 since the object might be accessed in any mode and frame addresses
2049 are shared. */
2050
2051 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2052 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2053 mode = GET_MODE_WIDER_MODE (mode))
2054 if (! memory_address_p (mode, addr))
2055 return;
2056
2057 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2058 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2059 mode = GET_MODE_WIDER_MODE (mode))
2060 if (! memory_address_p (mode, addr))
2061 return;
2062
2063 /* Otherwise, put back the address, now that we have updated it and we
2064 know it is valid. */
2065
2066 XEXP (x, 0) = addr;
2067}
2068\f
2069/* Given a pointer to a piece of rtx and an optional pointer to the
2070 containing object, instantiate any virtual registers present in it.
2071
2072 If EXTRA_INSNS, we always do the replacement and generate
 2073 any extra insns before OBJECT.  If it is zero, we do nothing if replacement
2074 is not valid.
2075
2076 Return 1 if we either had nothing to do or if we were able to do the
2077 needed replacement. Return 0 otherwise; we only return zero if
2078 EXTRA_INSNS is zero.
2079
2080 We first try some simple transformations to avoid the creation of extra
2081 pseudos. */
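/* As a sketch of the cheapest case handled below (numbers are
   hypothetical): with var_offset == 16,

	(plus:SI (reg:SI virtual-stack-vars) (const_int 4))

   becomes (plus:SI (reg:SI frame-pointer) (const_int 20)) by folding
   the offset into the existing constant; new pseudos and insns are
   generated only when validate_change rejects such simple forms.  */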
2082
2083static int
2084instantiate_virtual_regs_1 (loc, object, extra_insns)
2085 rtx *loc;
2086 rtx object;
2087 int extra_insns;
2088{
2089 rtx x;
2090 RTX_CODE code;
2091 rtx new = 0;
2092 int offset;
2093 rtx temp;
2094 rtx seq;
2095 int i, j;
2096 char *fmt;
2097
2098 /* Re-start here to avoid recursion in common cases. */
2099 restart:
2100
2101 x = *loc;
2102 if (x == 0)
2103 return 1;
2104
2105 code = GET_CODE (x);
2106
2107 /* Check for some special cases. */
2108 switch (code)
2109 {
2110 case CONST_INT:
2111 case CONST_DOUBLE:
2112 case CONST:
2113 case SYMBOL_REF:
2114 case CODE_LABEL:
2115 case PC:
2116 case CC0:
2117 case ASM_INPUT:
2118 case ADDR_VEC:
2119 case ADDR_DIFF_VEC:
2120 case RETURN:
2121 return 1;
2122
2123 case SET:
 2124 /* We are allowed to set the virtual registers.  This means that
 2125 the actual register should receive the source minus the
2126 appropriate offset. This is used, for example, in the handling
2127 of non-local gotos. */
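      /* Hypothetical example: with in_arg_offset == 8, the assignment
	 (set (reg virtual-incoming-args) (reg src)) is rewritten as
	 (set (reg arg-pointer) (plus (reg src) (const_int -8))),
	 emitting any insns needed to form the sum before this insn.  */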
2128 if (SET_DEST (x) == virtual_incoming_args_rtx)
2129 new = arg_pointer_rtx, offset = - in_arg_offset;
2130 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2131 new = frame_pointer_rtx, offset = - var_offset;
2132 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2133 new = stack_pointer_rtx, offset = - dynamic_offset;
2134 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2135 new = stack_pointer_rtx, offset = - out_arg_offset;
2136
2137 if (new)
2138 {
2139 /* The only valid sources here are PLUS or REG. Just do
2140 the simplest possible thing to handle them. */
2141 if (GET_CODE (SET_SRC (x)) != REG
2142 && GET_CODE (SET_SRC (x)) != PLUS)
2143 abort ();
2144
2145 start_sequence ();
2146 if (GET_CODE (SET_SRC (x)) != REG)
5f4f0e22 2147 temp = force_operand (SET_SRC (x), NULL_RTX);
2148 else
2149 temp = SET_SRC (x);
5f4f0e22 2150 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2151 seq = get_insns ();
2152 end_sequence ();
2153
2154 emit_insns_before (seq, object);
2155 SET_DEST (x) = new;
2156
2157 if (!validate_change (object, &SET_SRC (x), temp, 0)
2158 || ! extra_insns)
2159 abort ();
2160
2161 return 1;
2162 }
2163
2164 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2165 loc = &SET_SRC (x);
2166 goto restart;
2167
2168 case PLUS:
2169 /* Handle special case of virtual register plus constant. */
2170 if (CONSTANT_P (XEXP (x, 1)))
2171 {
2172 rtx old;
2173
2174 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2175 if (GET_CODE (XEXP (x, 0)) == PLUS)
2176 {
2177 rtx inner = XEXP (XEXP (x, 0), 0);
2178
2179 if (inner == virtual_incoming_args_rtx)
2180 new = arg_pointer_rtx, offset = in_arg_offset;
2181 else if (inner == virtual_stack_vars_rtx)
2182 new = frame_pointer_rtx, offset = var_offset;
2183 else if (inner == virtual_stack_dynamic_rtx)
2184 new = stack_pointer_rtx, offset = dynamic_offset;
2185 else if (inner == virtual_outgoing_args_rtx)
2186 new = stack_pointer_rtx, offset = out_arg_offset;
2187 else
2188 {
2189 loc = &XEXP (x, 0);
2190 goto restart;
2191 }
2192
2193 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2194 extra_insns);
2195 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2196 }
2197
2198 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2199 new = arg_pointer_rtx, offset = in_arg_offset;
2200 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2201 new = frame_pointer_rtx, offset = var_offset;
2202 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2203 new = stack_pointer_rtx, offset = dynamic_offset;
2204 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2205 new = stack_pointer_rtx, offset = out_arg_offset;
2206 else
2207 {
2208 /* We know the second operand is a constant. Unless the
 2209 first operand is a REG (which has already been checked),
2210 it needs to be checked. */
2211 if (GET_CODE (XEXP (x, 0)) != REG)
2212 {
2213 loc = &XEXP (x, 0);
2214 goto restart;
2215 }
2216 return 1;
2217 }
2218
2219 old = XEXP (x, 0);
2220 XEXP (x, 0) = new;
2221 new = plus_constant (XEXP (x, 1), offset);
2222
2223 /* If the new constant is zero, try to replace the sum with its
2224 first operand. */
2225 if (new == const0_rtx
2226 && validate_change (object, loc, XEXP (x, 0), 0))
2227 return 1;
2228
2229 /* Next try to replace constant with new one. */
2230 if (!validate_change (object, &XEXP (x, 1), new, 0))
2231 {
2232 if (! extra_insns)
2233 {
2234 XEXP (x, 0) = old;
2235 return 0;
2236 }
2237
2238 /* Otherwise copy the new constant into a register and replace
2239 constant with that register. */
2240 temp = gen_reg_rtx (Pmode);
2241 if (validate_change (object, &XEXP (x, 1), temp, 0))
2242 emit_insn_before (gen_move_insn (temp, new), object);
2243 else
2244 {
2245 /* If that didn't work, replace this expression with a
2246 register containing the sum. */
2247
2248 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2249 XEXP (x, 0) = old;
2250
2251 start_sequence ();
5f4f0e22 2252 temp = force_operand (new, NULL_RTX);
2253 seq = get_insns ();
2254 end_sequence ();
2255
2256 emit_insns_before (seq, object);
2257 if (! validate_change (object, loc, temp, 0)
2258 && ! validate_replace_rtx (x, temp, object))
2259 abort ();
2260 }
2261 }
2262
2263 return 1;
2264 }
2265
2266 /* Fall through to generic two-operand expression case. */
2267 case EXPR_LIST:
2268 case CALL:
2269 case COMPARE:
2270 case MINUS:
2271 case MULT:
2272 case DIV: case UDIV:
2273 case MOD: case UMOD:
2274 case AND: case IOR: case XOR:
2275 case LSHIFT: case ASHIFT: case ROTATE:
2276 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2277 case NE: case EQ:
2278 case GE: case GT: case GEU: case GTU:
2279 case LE: case LT: case LEU: case LTU:
2280 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2281 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2282 loc = &XEXP (x, 0);
2283 goto restart;
2284
2285 case MEM:
2286 /* Most cases of MEM that convert to valid addresses have already been
2287 handled by our scan of regno_reg_rtx. The only special handling we
2288 need here is to make a copy of the rtx to ensure it isn't being
b335c2cc 2289 shared if we have to change it to a pseudo.
2290
2291 If the rtx is a simple reference to an address via a virtual register,
2292 it can potentially be shared. In such cases, first try to make it
2293 a valid address, which can also be shared. Otherwise, copy it and
2294 proceed normally.
2295
2296 First check for common cases that need no processing. These are
2297 usually due to instantiation already being done on a previous instance
2298 of a shared rtx. */
2299
2300 temp = XEXP (x, 0);
2301 if (CONSTANT_ADDRESS_P (temp)
2302#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2303 || temp == arg_pointer_rtx
2304#endif
2305 || temp == frame_pointer_rtx)
2306 return 1;
2307
2308 if (GET_CODE (temp) == PLUS
2309 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2310 && (XEXP (temp, 0) == frame_pointer_rtx
2311#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2312 || XEXP (temp, 0) == arg_pointer_rtx
2313#endif
2314 ))
2315 return 1;
2316
2317 if (temp == virtual_stack_vars_rtx
2318 || temp == virtual_incoming_args_rtx
2319 || (GET_CODE (temp) == PLUS
2320 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2321 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2322 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2323 {
2324 /* This MEM may be shared. If the substitution can be done without
2325 the need to generate new pseudos, we want to do it in place
2326 so all copies of the shared rtx benefit. The call below will
2327 only make substitutions if the resulting address is still
2328 valid.
2329
2330 Note that we cannot pass X as the object in the recursive call
2331 since the insn being processed may not allow all valid
 2332 addresses.  However, if we were not passed an object, we can
2333 only modify X without copying it if X will have a valid
2334 address.
6f086dfc 2335
2336 ??? Also note that this can still lose if OBJECT is an insn that
 2337 has fewer restrictions on an address than some other insn.
2338 In that case, we will modify the shared address. This case
2339 doesn't seem very likely, though. */
2340
2341 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2342 object ? object : x, 0))
2343 return 1;
2344
2345 /* Otherwise make a copy and process that copy. We copy the entire
2346 RTL expression since it might be a PLUS which could also be
2347 shared. */
2348 *loc = x = copy_rtx (x);
2349 }
2350
2351 /* Fall through to generic unary operation case. */
2352 case USE:
2353 case CLOBBER:
2354 case SUBREG:
2355 case STRICT_LOW_PART:
2356 case NEG: case NOT:
2357 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2358 case SIGN_EXTEND: case ZERO_EXTEND:
2359 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2360 case FLOAT: case FIX:
2361 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2362 case ABS:
2363 case SQRT:
2364 case FFS:
 2365 /* These cases either have just one operand or we know that we need not
2366 check the rest of the operands. */
2367 loc = &XEXP (x, 0);
2368 goto restart;
2369
2370 case REG:
2371 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2372 in front of this insn and substitute the temporary. */
2373 if (x == virtual_incoming_args_rtx)
2374 new = arg_pointer_rtx, offset = in_arg_offset;
2375 else if (x == virtual_stack_vars_rtx)
2376 new = frame_pointer_rtx, offset = var_offset;
2377 else if (x == virtual_stack_dynamic_rtx)
2378 new = stack_pointer_rtx, offset = dynamic_offset;
2379 else if (x == virtual_outgoing_args_rtx)
2380 new = stack_pointer_rtx, offset = out_arg_offset;
2381
2382 if (new)
2383 {
2384 temp = plus_constant (new, offset);
2385 if (!validate_change (object, loc, temp, 0))
2386 {
2387 if (! extra_insns)
2388 return 0;
2389
2390 start_sequence ();
5f4f0e22 2391 temp = force_operand (temp, NULL_RTX);
2392 seq = get_insns ();
2393 end_sequence ();
2394
2395 emit_insns_before (seq, object);
2396 if (! validate_change (object, loc, temp, 0)
2397 && ! validate_replace_rtx (x, temp, object))
2398 abort ();
2399 }
2400 }
2401
2402 return 1;
2403 }
2404
2405 /* Scan all subexpressions. */
2406 fmt = GET_RTX_FORMAT (code);
2407 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2408 if (*fmt == 'e')
2409 {
2410 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2411 return 0;
2412 }
2413 else if (*fmt == 'E')
2414 for (j = 0; j < XVECLEN (x, i); j++)
2415 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2416 extra_insns))
2417 return 0;
2418
2419 return 1;
2420}
2421\f
2422/* Optimization: assuming this function does not receive nonlocal gotos,
2423 delete the handlers for such, as well as the insns to establish
2424 and disestablish them. */
2425
2426static void
2427delete_handlers ()
2428{
2429 rtx insn;
2430 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2431 {
2432 /* Delete the handler by turning off the flag that would
2433 prevent jump_optimize from deleting it.
2434 Also permit deletion of the nonlocal labels themselves
2435 if nothing local refers to them. */
2436 if (GET_CODE (insn) == CODE_LABEL)
2437 LABEL_PRESERVE_P (insn) = 0;
2438 if (GET_CODE (insn) == INSN
2439 && ((nonlocal_goto_handler_slot != 0
2440 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2441 || (nonlocal_goto_stack_level != 0
2442 && reg_mentioned_p (nonlocal_goto_stack_level,
2443 PATTERN (insn)))))
2444 delete_insn (insn);
2445 }
2446}
2447
2448/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2449 of the current function. */
2450
2451rtx
2452nonlocal_label_rtx_list ()
2453{
2454 tree t;
2455 rtx x = 0;
2456
2457 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2458 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2459
2460 return x;
2461}
2462\f
2463/* Output a USE for any register use in RTL.
 2464 This is used with -noreg to mark the extent of the lifespan
2465 of any registers used in a user-visible variable's DECL_RTL. */
2466
2467void
2468use_variable (rtl)
2469 rtx rtl;
2470{
2471 if (GET_CODE (rtl) == REG)
2472 /* This is a register variable. */
2473 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2474 else if (GET_CODE (rtl) == MEM
2475 && GET_CODE (XEXP (rtl, 0)) == REG
2476 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2477 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2478 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2479 /* This is a variable-sized structure. */
2480 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2481}
2482
2483/* Like use_variable except that it outputs the USEs after INSN
2484 instead of at the end of the insn-chain. */
2485
2486void
2487use_variable_after (rtl, insn)
2488 rtx rtl, insn;
2489{
2490 if (GET_CODE (rtl) == REG)
2491 /* This is a register variable. */
2492 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2493 else if (GET_CODE (rtl) == MEM
2494 && GET_CODE (XEXP (rtl, 0)) == REG
2495 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2496 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2497 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2498 /* This is a variable-sized structure. */
2499 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2500}
2501\f
2502int
2503max_parm_reg_num ()
2504{
2505 return max_parm_reg;
2506}
2507
2508/* Return the first insn following those generated by `assign_parms'. */
2509
2510rtx
2511get_first_nonparm_insn ()
2512{
2513 if (last_parm_insn)
2514 return NEXT_INSN (last_parm_insn);
2515 return get_insns ();
2516}
2517
2518/* Return 1 if EXP returns an aggregate value, for which an address
2519 must be passed to the function or returned by the function. */
2520
2521int
2522aggregate_value_p (exp)
2523 tree exp;
2524{
2525 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2526 return 1;
2527 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2528 return 1;
2529 if (flag_pcc_struct_return
2530 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2531 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2532 return 1;
2533 return 0;
2534}
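/* Illustrative note: for a function declared `struct { int a[4]; } f ();'
   on a typical 32-bit target the struct has BLKmode, so
   aggregate_value_p returns 1 and an address must be passed for the
   return value; for `int f ();' it returns 0 unless RETURN_IN_MEMORY
   says otherwise.  */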
2535\f
2536/* Assign RTL expressions to the function's parameters.
2537 This may involve copying them into registers and using
2538 those registers as the RTL for them.
2539
2540 If SECOND_TIME is non-zero it means that this function is being
2541 called a second time. This is done by integrate.c when a function's
2542 compilation is deferred. We need to come back here in case the
2543 FUNCTION_ARG macro computes items needed for the rest of the compilation
2544 (such as changing which registers are fixed or caller-saved). But suppress
2545 writing any insns or setting DECL_RTL of anything in this case. */
2546
2547void
2548assign_parms (fndecl, second_time)
2549 tree fndecl;
2550 int second_time;
2551{
2552 register tree parm;
2553 register rtx entry_parm = 0;
2554 register rtx stack_parm = 0;
2555 CUMULATIVE_ARGS args_so_far;
2556 enum machine_mode passed_mode, nominal_mode;
00d8a4c1 2557 int unsignedp;
2558 /* Total space needed so far for args on the stack,
2559 given as a constant and a tree-expression. */
2560 struct args_size stack_args_size;
2561 tree fntype = TREE_TYPE (fndecl);
2562 tree fnargs = DECL_ARGUMENTS (fndecl);
2563 /* This is used for the arg pointer when referring to stack args. */
2564 rtx internal_arg_pointer;
2565 /* This is a dummy PARM_DECL that we used for the function result if
2566 the function returns a structure. */
2567 tree function_result_decl = 0;
2568 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2569 int varargs_setup = 0;
2570
2571 /* Nonzero if the last arg is named `__builtin_va_alist',
2572 which is used on some machines for old-fashioned non-ANSI varargs.h;
2573 this should be stuck onto the stack as if it had arrived there. */
2574 int vararg
2575 = (fnargs
2576 && (parm = tree_last (fnargs)) != 0
2577 && DECL_NAME (parm)
2578 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2579 "__builtin_va_alist")));
2580
2581 /* Nonzero if function takes extra anonymous args.
2582 This means the last named arg must be on the stack
2583 right before the anonymous ones. */
2584 int stdarg
2585 = (TYPE_ARG_TYPES (fntype) != 0
2586 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2587 != void_type_node));
2588
2589 /* If the reg that the virtual arg pointer will be translated into is
2590 not a fixed reg or is the stack pointer, make a copy of the virtual
2591 arg pointer, and address parms via the copy. The frame pointer is
2592 considered fixed even though it is not marked as such.
2593
2594 The second time through, simply use ap to avoid generating rtx. */
2595
2596 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2597 || ! (fixed_regs[ARG_POINTER_REGNUM]
2598 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2599 && ! second_time)
2600 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2601 else
2602 internal_arg_pointer = virtual_incoming_args_rtx;
2603 current_function_internal_arg_pointer = internal_arg_pointer;
2604
2605 stack_args_size.constant = 0;
2606 stack_args_size.var = 0;
2607
2608 /* If struct value address is treated as the first argument, make it so. */
2609 if (aggregate_value_p (DECL_RESULT (fndecl))
2610 && ! current_function_returns_pcc_struct
2611 && struct_value_incoming_rtx == 0)
2612 {
2613 tree type = build_pointer_type (fntype);
2614
5f4f0e22 2615 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2616
2617 DECL_ARG_TYPE (function_result_decl) = type;
2618 TREE_CHAIN (function_result_decl) = fnargs;
2619 fnargs = function_result_decl;
2620 }
2621
2622 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2623 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2624
2625#ifdef INIT_CUMULATIVE_INCOMING_ARGS
5f4f0e22 2626 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
6f086dfc 2627#else
5f4f0e22 2628 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2629#endif
2630
2631 /* We haven't yet found an argument that we must push and pretend the
2632 caller did. */
2633 current_function_pretend_args_size = 0;
2634
2635 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2636 {
2637 int aggregate
2638 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2639 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2640 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2641 struct args_size stack_offset;
2642 struct args_size arg_size;
2643 int passed_pointer = 0;
2644 tree passed_type = DECL_ARG_TYPE (parm);
2645
 2646 /* Set LAST_NAMED if this is the last named arg before some
2647 anonymous args. We treat it as if it were anonymous too. */
2648 int last_named = ((TREE_CHAIN (parm) == 0
2649 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2650 && (vararg || stdarg));
2651
2652 if (TREE_TYPE (parm) == error_mark_node
2653 /* This can happen after weird syntax errors
2654 or if an enum type is defined among the parms. */
2655 || TREE_CODE (parm) != PARM_DECL
2656 || passed_type == NULL)
2657 {
2658 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2659 TREE_USED (parm) = 1;
2660 continue;
2661 }
2662
2663 /* For varargs.h function, save info about regs and stack space
2664 used by the individual args, not including the va_alist arg. */
2665 if (vararg && last_named)
2666 current_function_args_info = args_so_far;
2667
2668 /* Find mode of arg as it is passed, and mode of arg
2669 as it should be during execution of this function. */
2670 passed_mode = TYPE_MODE (passed_type);
2671 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2672
2673 /* If the parm's mode is VOID, its value doesn't matter,
2674 and avoid the usual things like emit_move_insn that could crash. */
2675 if (nominal_mode == VOIDmode)
2676 {
2677 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2678 continue;
2679 }
2680
2681#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2682 /* See if this arg was passed by invisible reference. */
2683 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2684 passed_type, ! last_named))
2685 {
2686 passed_type = build_pointer_type (passed_type);
2687 passed_pointer = 1;
2688 passed_mode = nominal_mode = Pmode;
2689 }
2690#endif
2691
2692 /* Let machine desc say which reg (if any) the parm arrives in.
2693 0 means it arrives on the stack. */
2694#ifdef FUNCTION_INCOMING_ARG
2695 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2696 passed_type, ! last_named);
2697#else
2698 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2699 passed_type, ! last_named);
2700#endif
2701
2702#ifdef SETUP_INCOMING_VARARGS
2703 /* If this is the last named parameter, do any required setup for
2704 varargs or stdargs. We need to know about the case of this being an
2705 addressable type, in which case we skip the registers it
2706 would have arrived in.
2707
2708 For stdargs, LAST_NAMED will be set for two parameters, the one that
2709 is actually the last named, and the dummy parameter. We only
2710 want to do this action once.
2711
2712 Also, indicate when RTL generation is to be suppressed. */
2713 if (last_named && !varargs_setup)
2714 {
2715 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2716 current_function_pretend_args_size,
2717 second_time);
2718 varargs_setup = 1;
2719 }
2720#endif
2721
2722 /* Determine parm's home in the stack,
2723 in case it arrives in the stack or we should pretend it did.
2724
2725 Compute the stack position and rtx where the argument arrives
2726 and its size.
2727
2728 There is one complexity here: If this was a parameter that would
 2729 have been passed in registers, but wasn't, only because it is
2730 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2731 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2732 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2733 0 as it was the previous time. */
2734
2735 locate_and_pad_parm (passed_mode, passed_type,
2736#ifdef STACK_PARMS_IN_REG_PARM_AREA
2737 1,
2738#else
2739#ifdef FUNCTION_INCOMING_ARG
2740 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2741 passed_type,
2742 (! last_named
2743 || varargs_setup)) != 0,
2744#else
2745 FUNCTION_ARG (args_so_far, passed_mode,
2746 passed_type,
2747 ! last_named || varargs_setup) != 0,
2748#endif
2749#endif
2750 fndecl, &stack_args_size, &stack_offset, &arg_size);
2751
2752 if (! second_time)
2753 {
2754 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2755
2756 if (offset_rtx == const0_rtx)
2757 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2758 else
2759 stack_parm = gen_rtx (MEM, passed_mode,
2760 gen_rtx (PLUS, Pmode,
2761 internal_arg_pointer, offset_rtx));
2762
2763 /* If this is a memory ref that contains aggregate components,
2764 mark it as such for cse and loop optimize. */
2765 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2766 }
2767
2768 /* If this parameter was passed both in registers and in the stack,
2769 use the copy on the stack. */
2770 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2771 entry_parm = 0;
2772
2773 /* If this parm was passed part in regs and part in memory,
2774 pretend it arrived entirely in memory
2775 by pushing the register-part onto the stack.
2776
2777 In the special case of a DImode or DFmode that is split,
2778 we could put it together in a pseudoreg directly,
2779 but for now that's not worth bothering with. */
2780
2781 if (entry_parm)
2782 {
2783 int nregs = 0;
2784#ifdef FUNCTION_ARG_PARTIAL_NREGS
2785 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2786 passed_type, ! last_named);
2787#endif
2788
2789 if (nregs > 0)
2790 {
2791 current_function_pretend_args_size
2792 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2793 / (PARM_BOUNDARY / BITS_PER_UNIT)
2794 * (PARM_BOUNDARY / BITS_PER_UNIT));
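	      /* E.g. (hypothetical values): nregs == 3 with
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32 gives
		 12 bytes, already a multiple of the 4-byte parm
		 boundary, so 12 is recorded as the amount the caller
		 is pretended to have pushed.  */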
2795
2796 if (! second_time)
2797 move_block_from_reg (REGNO (entry_parm),
2798 validize_mem (stack_parm), nregs);
2799 entry_parm = stack_parm;
2800 }
2801 }
2802
2803 /* If we didn't decide this parm came in a register,
2804 by default it came on the stack. */
2805 if (entry_parm == 0)
2806 entry_parm = stack_parm;
2807
2808 /* Record permanently how this parm was passed. */
2809 if (! second_time)
2810 DECL_INCOMING_RTL (parm) = entry_parm;
2811
2812 /* If there is actually space on the stack for this parm,
2813 count it in stack_args_size; otherwise set stack_parm to 0
2814 to indicate there is no preallocated stack slot for the parm. */
2815
2816 if (entry_parm == stack_parm
d9ca49d5 2817#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
6f086dfc 2818 /* On some machines, even if a parm value arrives in a register
2819 there is still an (uninitialized) stack slot allocated for it.
2820
2821 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2822 whether this parameter already has a stack slot allocated,
2823 because an arg block exists only if current_function_args_size
 2824 is larger than some threshold, and we haven't calculated that
2825 yet. So, for now, we just assume that stack slots never exist
2826 in this case. */
2827 || REG_PARM_STACK_SPACE (fndecl) > 0
2828#endif
2829 )
2830 {
2831 stack_args_size.constant += arg_size.constant;
2832 if (arg_size.var)
2833 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2834 }
2835 else
2836 /* No stack slot was pushed for this parm. */
2837 stack_parm = 0;
2838
2839 /* Update info on where next arg arrives in registers. */
2840
2841 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2842 passed_type, ! last_named);
2843
2844 /* If this is our second time through, we are done with this parm. */
2845 if (second_time)
2846 continue;
2847
2848 /* If we can't trust the parm stack slot to be aligned enough
2849 for its ultimate type, don't use that slot after entry.
2850 We'll make another stack slot, if we need one. */
2851 {
2852#ifdef FUNCTION_ARG_BOUNDARY
2853 int thisparm_boundary
2854 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2855#else
2856 int thisparm_boundary = PARM_BOUNDARY;
2857#endif
2858
2859 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2860 stack_parm = 0;
2861 }
2862
2863 /* Now adjust STACK_PARM to the mode and precise location
2864 where this parameter should live during execution,
2865 if we discover that it must live in the stack during execution.
2866 To make debuggers happier on big-endian machines, we store
2867 the value in the last bytes of the space available. */
2868
2869 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2870 && stack_parm != 0)
2871 {
2872 rtx offset_rtx;
2873
2874#if BYTES_BIG_ENDIAN
2875 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2876 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2877 - GET_MODE_SIZE (nominal_mode));
2878#endif
2879
2880 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2881 if (offset_rtx == const0_rtx)
2882 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2883 else
2884 stack_parm = gen_rtx (MEM, nominal_mode,
2885 gen_rtx (PLUS, Pmode,
2886 internal_arg_pointer, offset_rtx));
2887
2888 /* If this is a memory ref that contains aggregate components,
2889 mark it as such for cse and loop optimize. */
2890 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2891 }
2892
2893 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2894 in the mode in which it arrives.
2895 STACK_PARM is an RTX for a stack slot where the parameter can live
2896 during the function (in case we want to put it there).
2897 STACK_PARM is 0 if no stack slot was pushed for it.
2898
2899 Now output code if necessary to convert ENTRY_PARM to
2900 the type in which this function declares it,
2901 and store that result in an appropriate place,
2902 which may be a pseudo reg, may be STACK_PARM,
2903 or may be a local stack slot if STACK_PARM is 0.
2904
2905 Set DECL_RTL to that place. */
2906
2907 if (nominal_mode == BLKmode)
2908 {
2909 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2910 if (GET_CODE (entry_parm) == REG)
2911 {
2912 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2913 UNITS_PER_WORD);
2914
2915 /* Note that we will be storing an integral number of words.
2916 So we have to be careful to ensure that we allocate an
2917 integral number of words. We do this below in the
2918 assign_stack_local if space was not allocated in the argument
2919 list. If it was, this will not work if PARM_BOUNDARY is not
2920 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2921 if it becomes a problem. */
2922
2923 if (stack_parm == 0)
2924 {
2925 stack_parm
2926 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2927 /* If this is a memory ref that contains aggregate components,
2928 mark it as such for cse and loop optimize. */
2929 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2930 }
2931
2932 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2933 abort ();
2934
2935 move_block_from_reg (REGNO (entry_parm),
2936 validize_mem (stack_parm),
2937 size_stored / UNITS_PER_WORD);
2938 }
2939 DECL_RTL (parm) = stack_parm;
2940 }
74bd77a8 2941 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
a82ad570 2942 && ! DECL_INLINE (fndecl))
2943 /* layout_decl may set this. */
2944 || TREE_ADDRESSABLE (parm)
2945 || TREE_SIDE_EFFECTS (parm)
2946 /* If -ffloat-store specified, don't put explicit
2947 float variables into registers. */
2948 || (flag_float_store
2949 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2950 /* Always assign pseudo to structure return or item passed
2951 by invisible reference. */
2952 || passed_pointer || parm == function_result_decl)
2953 {
2954 /* Store the parm in a pseudoregister during the function, but we
2955 may need to do it in a wider mode. */
2956
2957 register rtx parmreg;
2958
2959 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
2960 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
2961 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
2962 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
2963 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
2964 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
2965 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
2966 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
2967 {
2968 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
2969 }
6f086dfc 2970
00d8a4c1 2971 parmreg = gen_reg_rtx (nominal_mode);
2972 REG_USERVAR_P (parmreg) = 1;
2973
2974 /* If this was an item that we received a pointer to, set DECL_RTL
2975 appropriately. */
2976 if (passed_pointer)
2977 {
2978 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2979 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2980 }
2981 else
2982 DECL_RTL (parm) = parmreg;
2983
2984 /* Copy the value into the register. */
2985 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
2986 {
2987 /* If ENTRY_PARM is a hard register, it might be in a register
2988 not valid for operating in its mode (e.g., an odd-numbered
2989 register for a DFmode). In that case, moves are the only
2990 thing valid, so we can't do a convert from there. This
 2991 occurs when the calling sequence allows such misaligned
2992 usages. */
2993 if (GET_CODE (entry_parm) == REG
2994 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2995 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2996 GET_MODE (entry_parm)))
00d8a4c1 2997 convert_move (parmreg, copy_to_reg (entry_parm), unsignedp);
86f8eff3 2998 else
00d8a4c1 2999 convert_move (parmreg, validize_mem (entry_parm), unsignedp);
86f8eff3 3000 }
3001 else
3002 emit_move_insn (parmreg, validize_mem (entry_parm));
3003
3004 /* If we were passed a pointer but the actual value
3005 can safely live in a register, put it in one. */
16bae307 3006 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3007 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3008 && ! DECL_INLINE (fndecl))
3009 /* layout_decl may set this. */
3010 || TREE_ADDRESSABLE (parm)
3011 || TREE_SIDE_EFFECTS (parm)
3012 /* If -ffloat-store specified, don't put explicit
3013 float variables into registers. */
3014 || (flag_float_store
3015 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3016 {
3017 /* We can't use nominal_mode, because it will have been set to
3018 Pmode above. We must use the actual mode of the parm. */
3019 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3020 emit_move_insn (parmreg, DECL_RTL (parm));
3021 DECL_RTL (parm) = parmreg;
3022 }
3023
3024 /* In any case, record the parm's desired stack location
3025 in case we later discover it must live in the stack. */
3026 if (REGNO (parmreg) >= nparmregs)
3027 {
3028 rtx *new;
3029 nparmregs = REGNO (parmreg) + 5;
3030 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3031 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
3032 parm_reg_stack_loc = new;
3033 }
3034 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3035
3036 /* Mark the register as eliminable if we did no conversion
3037 and it was copied from memory at a fixed offset,
3038 and the arg pointer was not copied to a pseudo-reg.
3039 If the arg pointer is a pseudo reg or the offset formed
3040 an invalid address, such memory-equivalences
3041 as we make here would screw up life analysis for it. */
3042 if (nominal_mode == passed_mode
3043 && GET_CODE (entry_parm) == MEM
e16c591a 3044 && entry_parm == stack_parm
3045 && stack_offset.var == 0
3046 && reg_mentioned_p (virtual_incoming_args_rtx,
3047 XEXP (entry_parm, 0)))
3048 REG_NOTES (get_last_insn ())
3049 = gen_rtx (EXPR_LIST, REG_EQUIV,
3050 entry_parm, REG_NOTES (get_last_insn ()));
3051
3052 /* For pointer data type, suggest pointer register. */
3053 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3054 mark_reg_pointer (parmreg);
3055 }
3056 else
3057 {
3058 /* Value must be stored in the stack slot STACK_PARM
3059 during function execution. */
3060
3061 if (passed_mode != nominal_mode)
3062 {
3063 /* Conversion is required. */
3064 if (GET_CODE (entry_parm) == REG
3065 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
3066 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
3067 entry_parm = copy_to_reg (entry_parm);
3068
3069 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
3070 }
3071
3072 if (entry_parm != stack_parm)
3073 {
3074 if (stack_parm == 0)
3075 {
3076 stack_parm
3077 = assign_stack_local (GET_MODE (entry_parm),
3078 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3079 /* If this is a memory ref that contains aggregate components,
3080 mark it as such for cse and loop optimize. */
3081 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3082 }
3083
3084 emit_move_insn (validize_mem (stack_parm),
3085 validize_mem (entry_parm));
3086 }
3087
3088 DECL_RTL (parm) = stack_parm;
3089 }
3090
3091 /* If this "parameter" was the place where we are receiving the
3092 function's incoming structure pointer, set up the result. */
3093 if (parm == function_result_decl)
3094 DECL_RTL (DECL_RESULT (fndecl))
3095 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3096
3097 if (TREE_THIS_VOLATILE (parm))
3098 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3099 if (TREE_READONLY (parm))
3100 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3101 }
3102
3103 max_parm_reg = max_reg_num ();
3104 last_parm_insn = get_last_insn ();
3105
3106 current_function_args_size = stack_args_size.constant;
3107
3108 /* Adjust function incoming argument size for alignment and
3109 minimum length. */
3110
3111#ifdef REG_PARM_STACK_SPACE
6f90e075 3112#ifndef MAYBE_REG_PARM_STACK_SPACE
3113 current_function_args_size = MAX (current_function_args_size,
3114 REG_PARM_STACK_SPACE (fndecl));
3115#endif
6f90e075 3116#endif
3117
3118#ifdef STACK_BOUNDARY
3119#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3120
3121 current_function_args_size
3122 = ((current_function_args_size + STACK_BYTES - 1)
3123 / STACK_BYTES) * STACK_BYTES;
3124#endif
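/* For example (hypothetical sizes): with STACK_BOUNDARY == 64,
   STACK_BYTES is 8 and the rounding above turns an args size of 13
   into 16, keeping the argument block a whole number of stack
   units.  */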
3125
3126#ifdef ARGS_GROW_DOWNWARD
3127 current_function_arg_offset_rtx
5f4f0e22 3128 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3129 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3130 size_int (-stack_args_size.constant)),
5f4f0e22 3131 NULL_RTX, VOIDmode, 0));
3132#else
3133 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3134#endif
3135
3136 /* See how many bytes, if any, of its args a function should try to pop
3137 on return. */
3138
3139 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3140 current_function_args_size);
3141
3142 /* For stdarg.h function, save info about regs and stack space
3143 used by the named args. */
3144
3145 if (stdarg)
3146 current_function_args_info = args_so_far;
3147
3148 /* Set the rtx used for the function return value. Put this in its
3149 own variable so any optimizers that need this information don't have
3150 to include tree.h. Do this here so it gets done when an inlined
3151 function gets output. */
3152
3153 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3154}
3155\f
3156/* Compute the size and offset from the start of the stacked arguments for a
3157 parm passed in mode PASSED_MODE and with type TYPE.
3158
3159 INITIAL_OFFSET_PTR points to the current offset into the stacked
3160 arguments.
3161
3162 The starting offset and size for this parm are returned in *OFFSET_PTR
3163 and *ARG_SIZE_PTR, respectively.
3164
3165 IN_REGS is non-zero if the argument will be passed in registers. It will
3166 never be set if REG_PARM_STACK_SPACE is not defined.
3167
3168 FNDECL is the function in which the argument was defined.
3169
3170 There are two types of rounding that are done. The first, controlled by
3171 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3172 list to be aligned to the specific boundary (in bits). This rounding
3173 affects the initial and starting offsets, but not the argument size.
3174
3175 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3176 optionally rounds the size of the parm to PARM_BOUNDARY. The
3177 initial offset is not affected by this rounding, while the size always
3178 is and the starting offset may be. */
3179
 3180/* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
3181 initial_offset_ptr is positive because locate_and_pad_parm's
3182 callers pass in the total size of args so far as
 3183 initial_offset_ptr.  arg_size_ptr is always positive. */
3184
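/* Worked example with made-up numbers: for a 9-byte parm with
   FUNCTION_ARG_BOUNDARY of 64 bits and PARM_BOUNDARY of 32 bits,
   an initial offset of 4 is first rounded up to 8 (the starting
   offset), the size 9 is rounded up to 12, and the next parm's
   initial offset becomes 8 + 12 == 20 (in the upward-growing
   case).  */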
3185static void pad_to_arg_alignment (), pad_below ();
3186
3187void
3188locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3189 initial_offset_ptr, offset_ptr, arg_size_ptr)
3190 enum machine_mode passed_mode;
3191 tree type;
3192 int in_regs;
3193 tree fndecl;
3194 struct args_size *initial_offset_ptr;
3195 struct args_size *offset_ptr;
3196 struct args_size *arg_size_ptr;
3197{
3198 tree sizetree
3199 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3200 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3201 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3202 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3203 int reg_parm_stack_space = 0;
3204
3205#ifdef REG_PARM_STACK_SPACE
3206 /* If we have found a stack parm before we reach the end of the
3207 area reserved for registers, skip that area. */
3208 if (! in_regs)
3209 {
3210#ifdef MAYBE_REG_PARM_STACK_SPACE
3211 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3212#else
6f086dfc 3213 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
29008b51 3214#endif
3215 if (reg_parm_stack_space > 0)
3216 {
3217 if (initial_offset_ptr->var)
3218 {
3219 initial_offset_ptr->var
3220 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3221 size_int (reg_parm_stack_space));
3222 initial_offset_ptr->constant = 0;
3223 }
3224 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3225 initial_offset_ptr->constant = reg_parm_stack_space;
3226 }
3227 }
3228#endif /* REG_PARM_STACK_SPACE */
3229
3230 arg_size_ptr->var = 0;
3231 arg_size_ptr->constant = 0;
3232
3233#ifdef ARGS_GROW_DOWNWARD
3234 if (initial_offset_ptr->var)
3235 {
3236 offset_ptr->constant = 0;
3237 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3238 initial_offset_ptr->var);
3239 }
3240 else
3241 {
3242 offset_ptr->constant = - initial_offset_ptr->constant;
3243 offset_ptr->var = 0;
3244 }
3245 if (where_pad == upward
3246 && (TREE_CODE (sizetree) != INTEGER_CST
3247 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3248 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3249 SUB_PARM_SIZE (*offset_ptr, sizetree);
3250 pad_to_arg_alignment (offset_ptr, boundary);
3251 if (initial_offset_ptr->var)
3252 {
3253 arg_size_ptr->var = size_binop (MINUS_EXPR,
3254 size_binop (MINUS_EXPR,
3255 integer_zero_node,
3256 initial_offset_ptr->var),
3257 offset_ptr->var);
3258 }
3259 else
3260 {
3261 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3262 offset_ptr->constant);
3263 }
3264/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3265 if (where_pad == downward)
3266 pad_below (arg_size_ptr, passed_mode, sizetree);
3267#else /* !ARGS_GROW_DOWNWARD */
3268 pad_to_arg_alignment (initial_offset_ptr, boundary);
3269 *offset_ptr = *initial_offset_ptr;
3270 if (where_pad == downward)
3271 pad_below (offset_ptr, passed_mode, sizetree);
3272
3273#ifdef PUSH_ROUNDING
3274 if (passed_mode != BLKmode)
3275 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3276#endif
3277
3278 if (where_pad != none
3279 && (TREE_CODE (sizetree) != INTEGER_CST
3280 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3281 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3282
3283 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3284#endif /* ARGS_GROW_DOWNWARD */
3285}
3286
3287/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3288 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3289
3290static void
3291pad_to_arg_alignment (offset_ptr, boundary)
3292 struct args_size *offset_ptr;
3293 int boundary;
3294{
3295 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3296
3297 if (boundary > BITS_PER_UNIT)
3298 {
3299 if (offset_ptr->var)
3300 {
3301 offset_ptr->var =
3302#ifdef ARGS_GROW_DOWNWARD
3303 round_down
3304#else
3305 round_up
3306#endif
3307 (ARGS_SIZE_TREE (*offset_ptr),
3308 boundary / BITS_PER_UNIT);
3309 offset_ptr->constant = 0; /*?*/
3310 }
3311 else
3312 offset_ptr->constant =
3313#ifdef ARGS_GROW_DOWNWARD
3314 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3315#else
3316 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3317#endif
3318 }
3319}
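/* Illustrative sketch, not part of function.c: how the mask-based rounding
   used above behaves.  Because the boundary is a power of two, FLOOR_ROUND
   and CEIL_ROUND reduce to bit masking, which is also correct for the
   negative offsets produced when args grow downward.  The DEMO_* names are
   hypothetical copies of those macros.  */
#if 0
#include <assert.h>

#define DEMO_FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
#define DEMO_CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  assert (DEMO_FLOOR_ROUND (5, 4) == 4);    /* down to the boundary */
  assert (DEMO_CEIL_ROUND (5, 4) == 8);     /* up to the boundary */
  assert (DEMO_FLOOR_ROUND (-5, 4) == -8);  /* negative offsets work too */
  return 0;
}
#endif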
3320
3321static void
3322pad_below (offset_ptr, passed_mode, sizetree)
3323 struct args_size *offset_ptr;
3324 enum machine_mode passed_mode;
3325 tree sizetree;
3326{
3327 if (passed_mode != BLKmode)
3328 {
3329 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3330 offset_ptr->constant
3331 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3332 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3333 - GET_MODE_SIZE (passed_mode));
3334 }
3335 else
3336 {
3337 if (TREE_CODE (sizetree) != INTEGER_CST
3338 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3339 {
3340 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3341 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3342 /* Add it in. */
3343 ADD_PARM_SIZE (*offset_ptr, s2);
3344 SUB_PARM_SIZE (*offset_ptr, sizetree);
3345 }
3346 }
3347}
3348
3349static tree
3350round_down (value, divisor)
3351 tree value;
3352 int divisor;
3353{
3354 return size_binop (MULT_EXPR,
3355 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3356 size_int (divisor));
3357}
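/* Illustrative sketch, not part of function.c: round_down above builds the
   tree (value / divisor) * divisor with FLOOR_DIV_EXPR; the plain integer
   analogue is below.  Hypothetical name; assumes VALUE >= 0, since C
   division truncates toward zero rather than flooring.  */
#if 0
static int
demo_round_down (int value, int divisor)
{
  return value / divisor * divisor;   /* e.g. 37, 8 -> 32 */
}
#endif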
3358\f
3359/* Walk the tree of blocks describing the binding levels within a function
3360 and warn about uninitialized variables.
3361 This is done after calling flow_analysis and before global_alloc
3362 clobbers the pseudo-regs to hard regs. */
3363
3364void
3365uninitialized_vars_warning (block)
3366 tree block;
3367{
3368 register tree decl, sub;
3369 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3370 {
3371 if (TREE_CODE (decl) == VAR_DECL
3372 /* These warnings are unreliable for aggregates
3373 because assigning the fields one by one can fail to convince
3374 flow.c that the entire aggregate was initialized.
3375 Unions are troublesome because members may be shorter. */
3376 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3377 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3378 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3379 && DECL_RTL (decl) != 0
3380 && GET_CODE (DECL_RTL (decl)) == REG
3381 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3382 warning_with_decl (decl,
3383 "`%s' may be used uninitialized in this function");
3384 if (TREE_CODE (decl) == VAR_DECL
3385 && DECL_RTL (decl) != 0
3386 && GET_CODE (DECL_RTL (decl)) == REG
3387 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3388 warning_with_decl (decl,
3389 "variable `%s' may be clobbered by `longjmp'");
3390 }
3391 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3392 uninitialized_vars_warning (sub);
3393}
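/* Illustrative sketch, not part of function.c: the walk above is the
   standard recursion over the BLOCK tree -- handle each variable, then
   recurse into the subblocks.  The node type here is hypothetical.  */
#if 0
struct demo_block
{
  struct demo_block *subblocks;   /* first child */
  struct demo_block *chain;       /* next sibling */
};

static void
demo_walk (struct demo_block *block)
{
  struct demo_block *sub;
  /* ... inspect the variables of BLOCK here ... */
  for (sub = block->subblocks; sub; sub = sub->chain)
    demo_walk (sub);
}
#endif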
3394
3395/* Do the appropriate part of uninitialized_vars_warning
3396 but for arguments instead of local variables. */
3397
3398void
3399setjmp_args_warning (block)
3400 tree block;
3401{
3402 register tree decl;
3403 for (decl = DECL_ARGUMENTS (current_function_decl);
3404 decl; decl = TREE_CHAIN (decl))
3405 if (DECL_RTL (decl) != 0
3406 && GET_CODE (DECL_RTL (decl)) == REG
3407 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3408 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3409}
3410
3411/* If this function calls setjmp, put all vars into the stack
3412 unless they were declared `register'. */
3413
3414void
3415setjmp_protect (block)
3416 tree block;
3417{
3418 register tree decl, sub;
3419 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3420 if ((TREE_CODE (decl) == VAR_DECL
3421 || TREE_CODE (decl) == PARM_DECL)
3422 && DECL_RTL (decl) != 0
3423 && GET_CODE (DECL_RTL (decl)) == REG
3424 /* If this variable came from an inline function, it must be
3425 that its life doesn't overlap the setjmp. If there was a
3426 setjmp in the function, it would already be in memory. We
3427 must exclude such variables because their DECL_RTL might be
3428 set to strange things such as virtual_stack_vars_rtx. */
3429 && ! DECL_FROM_INLINE (decl)
3430 && (
3431#ifdef NON_SAVING_SETJMP
3432 /* If longjmp doesn't restore the registers,
3433 don't put anything in them. */
3434 NON_SAVING_SETJMP
3435 ||
3436#endif
3437 ! DECL_REGISTER (decl)))
3438 put_var_into_stack (decl);
3439 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3440 setjmp_protect (sub);
3441}
3442\f
3443/* Like the previous function, but for args instead of local variables. */
3444
3445void
3446setjmp_protect_args ()
3447{
3448 register tree decl, sub;
3449 for (decl = DECL_ARGUMENTS (current_function_decl);
3450 decl; decl = TREE_CHAIN (decl))
3451 if ((TREE_CODE (decl) == VAR_DECL
3452 || TREE_CODE (decl) == PARM_DECL)
3453 && DECL_RTL (decl) != 0
3454 && GET_CODE (DECL_RTL (decl)) == REG
3455 && (
3456 /* If longjmp doesn't restore the registers,
3457 don't put anything in them. */
3458#ifdef NON_SAVING_SETJMP
3459 NON_SAVING_SETJMP
3460 ||
3461#endif
3462 ! DECL_REGISTER (decl)))
3463 put_var_into_stack (decl);
3464}
3465\f
3466/* Return the context-pointer register corresponding to DECL,
3467 or 0 if it does not need one. */
3468
3469rtx
3470lookup_static_chain (decl)
3471 tree decl;
3472{
3473 tree context = decl_function_context (decl);
3474 tree link;
3475
3476 if (context == 0)
3477 return 0;
3478
3479 /* We treat inline_function_decl as an alias for the current function
3480 because that is the inline function whose vars, types, etc.
3481 are being merged into the current function.
3482 See expand_inline_function. */
3483 if (context == current_function_decl || context == inline_function_decl)
3484 return virtual_stack_vars_rtx;
3485
3486 for (link = context_display; link; link = TREE_CHAIN (link))
3487 if (TREE_PURPOSE (link) == context)
3488 return RTL_EXPR_RTL (TREE_VALUE (link));
3489
3490 abort ();
3491}
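/* Illustrative sketch, not part of function.c: context_display is an
   association list keyed by the containing function, so the loop above is
   the classic TREE_PURPOSE/TREE_VALUE search.  The types and names below
   are hypothetical stand-ins.  */
#if 0
struct demo_assoc
{
  void *key;                     /* the context (a function decl) */
  void *value;                   /* its static-chain value */
  struct demo_assoc *next;
};

static void *
demo_lookup (struct demo_assoc *list, void *key)
{
  for (; list; list = list->next)
    if (list->key == key)
      return list->value;
  return 0;                      /* the real code aborts here */
}
#endif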
3492\f
3493/* Convert a stack slot address ADDR for variable VAR
3494 (from a containing function)
3495 into an address valid in this function (using a static chain). */
3496
3497rtx
3498fix_lexical_addr (addr, var)
3499 rtx addr;
3500 tree var;
3501{
3502 rtx basereg;
3503 int displacement;
3504 tree context = decl_function_context (var);
3505 struct function *fp;
3506 rtx base = 0;
3507
3508 /* If this is the present function, we need not do anything. */
3509 if (context == current_function_decl || context == inline_function_decl)
3510 return addr;
3511
3512 for (fp = outer_function_chain; fp; fp = fp->next)
3513 if (fp->decl == context)
3514 break;
3515
3516 if (fp == 0)
3517 abort ();
3518
3519 /* Decode given address as base reg plus displacement. */
3520 if (GET_CODE (addr) == REG)
3521 basereg = addr, displacement = 0;
3522 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3523 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3524 else
3525 abort ();
3526
3527 /* We accept vars reached via the containing function's
3528 incoming arg pointer and via its stack variables pointer. */
3529 if (basereg == fp->internal_arg_pointer)
3530 {
3531 /* If reached via arg pointer, get the arg pointer value
3532 out of that function's stack frame.
3533
3534 There are two cases: If a separate ap is needed, allocate a
3535 slot in the outer function for it and dereference it that way.
3536 This is correct even if the real ap is actually a pseudo.
3537 Otherwise, just adjust the offset from the frame pointer to
3538 compensate. */
3539
3540#ifdef NEED_SEPARATE_AP
3541 rtx addr;
3542
3543 if (fp->arg_pointer_save_area == 0)
3544 fp->arg_pointer_save_area
3545 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3546
3547 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3548 addr = memory_address (Pmode, addr);
3549
3550 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3551#else
3552 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3553 base = lookup_static_chain (var);
3554#endif
3555 }
3556
3557 else if (basereg == virtual_stack_vars_rtx)
3558 {
3559 /* This is the same code as lookup_static_chain, duplicated here to
3560 avoid an extra call to decl_function_context. */
3561 tree link;
3562
3563 for (link = context_display; link; link = TREE_CHAIN (link))
3564 if (TREE_PURPOSE (link) == context)
3565 {
3566 base = RTL_EXPR_RTL (TREE_VALUE (link));
3567 break;
3568 }
3569 }
3570
3571 if (base == 0)
3572 abort ();
3573
3574 /* Use same offset, relative to appropriate static chain or argument
3575 pointer. */
3576 return plus_constant (base, displacement);
3577}
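/* Illustrative sketch, not part of function.c: the essence of the rebasing
   above -- the displacement decoded from REG or (PLUS REG CONST_INT) is
   kept, and only the base is replaced.  The struct is a hypothetical
   miniature of that address form.  */
#if 0
struct demo_address { long base; long displacement; };

static long
demo_rebase (struct demo_address a, long new_base)
{
  /* Same offset, relative to the new base.  */
  return new_base + a.displacement;
}
#endif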
3578\f
3579/* Return the address of the trampoline for entering nested fn FUNCTION.
3580 If necessary, allocate a trampoline (in the stack frame)
3581 and emit rtl to initialize its contents (at entry to this function). */
3582
3583rtx
3584trampoline_address (function)
3585 tree function;
3586{
3587 tree link;
3588 tree rtlexp;
3589 rtx tramp;
3590 struct function *fp;
3591 tree fn_context;
3592
3593 /* Find an existing trampoline and return it. */
3594 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3595 if (TREE_PURPOSE (link) == function)
3596 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3597 for (fp = outer_function_chain; fp; fp = fp->next)
3598 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3599 if (TREE_PURPOSE (link) == function)
3600 {
3601 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3602 function);
3603 return round_trampoline_addr (tramp);
3604 }
3605
3606 /* None exists; we must make one. */
3607
3608 /* Find the `struct function' for the function containing FUNCTION. */
3609 fp = 0;
3610 fn_context = decl_function_context (function);
3611 if (fn_context != current_function_decl)
3612 for (fp = outer_function_chain; fp; fp = fp->next)
3613 if (fp->decl == fn_context)
3614 break;
3615
3616 /* Allocate run-time space for this trampoline
3617 (usually in the defining function's stack frame). */
3618#ifdef ALLOCATE_TRAMPOLINE
3619 tramp = ALLOCATE_TRAMPOLINE (fp);
3620#else
3621 /* If rounding needed, allocate extra space
3622 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3623#ifdef TRAMPOLINE_ALIGNMENT
3624#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3625#else
3626#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3627#endif
3628 if (fp != 0)
3629 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3630 else
3631 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3632#endif
3633
3634 /* Record the trampoline for reuse and note it for later initialization
3635 by expand_function_end. */
3636 if (fp != 0)
3637 {
3638 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3639 rtlexp = make_node (RTL_EXPR);
3640 RTL_EXPR_RTL (rtlexp) = tramp;
3641 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3642 pop_obstacks ();
3643 }
3644 else
3645 {
3646 /* Make the RTL_EXPR node temporary, not momentary, so that the
3647 trampoline_list doesn't become garbage. */
3648 int momentary = suspend_momentary ();
3649 rtlexp = make_node (RTL_EXPR);
3650 resume_momentary (momentary);
3651
3652 RTL_EXPR_RTL (rtlexp) = tramp;
3653 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3654 }
3655
3656 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3657 return round_trampoline_addr (tramp);
3658}
3659
3660/* Given a trampoline address,
3661 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
3662
3663static rtx
3664round_trampoline_addr (tramp)
3665 rtx tramp;
3666{
3667#ifdef TRAMPOLINE_ALIGNMENT
3668 /* Round address up to desired boundary. */
3669 rtx temp = gen_reg_rtx (Pmode);
3670 temp = expand_binop (Pmode, add_optab, tramp,
3671 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3672 temp, 0, OPTAB_LIB_WIDEN);
3673 tramp = expand_binop (Pmode, and_optab, temp,
3674 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3675 temp, 0, OPTAB_LIB_WIDEN);
3676#endif
3677 return tramp;
3678}
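/* Illustrative sketch, not part of function.c: the add-then-mask sequence
   the two expand_binop calls above emit, written as ordinary C.  Assumes
   ALIGN is a power of two, as TRAMPOLINE_ALIGNMENT must be.  */
#if 0
#include <stdint.h>

static uintptr_t
demo_round_trampoline_addr (uintptr_t addr, uintptr_t align)
{
  /* (addr + align - 1) & -align, i.e. round up to the next multiple.  */
  return (addr + align - 1) & ~(align - 1);
}
#endif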
3679\f
3680/* The functions identify_blocks and reorder_blocks provide a way to
3681 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3682 duplicate portions of the RTL code. Call identify_blocks before
3683 changing the RTL, and call reorder_blocks after. */
3684
3685static int all_blocks ();
3686static tree blocks_nreverse ();
3687
3688/* Put all this function's BLOCK nodes into a vector, and return it.
3689 Also store in each NOTE for the beginning or end of a block
3690 the index of that block in the vector.
3691 The arguments are TOP_BLOCK, the top-level block of the function,
3692 and INSNS, the insn chain of the function. */
3693
3694tree *
3695identify_blocks (top_block, insns)
3696 tree top_block;
3697 rtx insns;
3698{
3699 int n_blocks;
3700 tree *block_vector;
3701 int *block_stack;
3702 int depth = 0;
3703 int next_block_number = 0;
3704 int current_block_number = 0;
3705 rtx insn;
3706
3707 if (top_block == 0)
3708 return 0;
3709
3710 n_blocks = all_blocks (top_block, 0);
3711 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3712 block_stack = (int *) alloca (n_blocks * sizeof (int));
3713
3714 all_blocks (top_block, block_vector);
3715
3716 for (insn = insns; insn; insn = NEXT_INSN (insn))
3717 if (GET_CODE (insn) == NOTE)
3718 {
3719 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3720 {
3721 block_stack[depth++] = current_block_number;
3722 current_block_number = next_block_number;
3723 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
3724 }
3725 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3726 {
3727 current_block_number = block_stack[--depth];
3728 NOTE_BLOCK_NUMBER (insn) = current_block_number;
3729 }
3730 }
3731
3732 return block_vector;
3733}
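/* Illustrative sketch, not part of function.c: the numbering scheme in
   identify_blocks.  Each BLOCK_BEG note pushes the enclosing block's number
   and takes the next fresh one; each BLOCK_END pops.  A toy version over a
   paren string ('(' = BEG, ')' = END): */
#if 0
static void
demo_number_blocks (const char *events)
{
  int stack[64];
  int depth = 0, next = 0, current = 0;

  for (; *events; events++)
    if (*events == '(')
      {
        stack[depth++] = current;
        current = next++;          /* this BEG note gets block NEXT */
      }
    else
      current = stack[--depth];    /* back to the enclosing block */
}
#endif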
3734
3735/* Given BLOCK_VECTOR which was returned by identify_blocks,
3736 and a revised instruction chain, rebuild the tree structure
3737 of BLOCK nodes to correspond to the new order of RTL.
3738 The new block tree is inserted below TOP_BLOCK.
3739 Returns the current top-level block. */
3740
3741tree
3742 reorder_blocks (block_vector, top_block, insns)
3743 tree *block_vector;
3744 tree top_block;
3745 rtx insns;
3746{
3747 tree current_block = top_block;
3748 rtx insn;
3749
3750 if (block_vector == 0)
3751 return top_block;
3752
3753 /* Prune the old tree away, so that it doesn't get in the way. */
3754 BLOCK_SUBBLOCKS (current_block) = 0;
3755
3756 for (insn = insns; insn; insn = NEXT_INSN (insn))
3757 if (GET_CODE (insn) == NOTE)
3758 {
3759 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3760 {
3761 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
3762 /* If we have seen this block before, copy it. */
3763 if (TREE_ASM_WRITTEN (block))
3764 block = copy_node (block);
3765 BLOCK_SUBBLOCKS (block) = 0;
3766 TREE_ASM_WRITTEN (block) = 1;
3767 BLOCK_SUPERCONTEXT (block) = current_block;
3768 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3769 BLOCK_SUBBLOCKS (current_block) = block;
3770 current_block = block;
3771 NOTE_SOURCE_FILE (insn) = 0;
3772 }
3773 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3774 {
3775 BLOCK_SUBBLOCKS (current_block)
3776 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3777 current_block = BLOCK_SUPERCONTEXT (current_block);
3778 NOTE_SOURCE_FILE (insn) = 0;
3779 }
3780 }
3781
3782 return current_block;
3783}
3784
3785/* Reverse the order of elements in the chain T of blocks,
3786 and return the new head of the chain (old last element). */
3787
3788static tree
3789blocks_nreverse (t)
3790 tree t;
3791{
3792 register tree prev = 0, decl, next;
3793 for (decl = t; decl; decl = next)
3794 {
3795 next = BLOCK_CHAIN (decl);
3796 BLOCK_CHAIN (decl) = prev;
3797 prev = decl;
3798 }
3799 return prev;
3800}
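/* Illustrative sketch, not part of function.c: blocks_nreverse is the usual
   in-place reversal of a singly linked chain, shown on a hypothetical node
   type.  */
#if 0
struct demo_node { struct demo_node *chain; };

static struct demo_node *
demo_nreverse (struct demo_node *t)
{
  struct demo_node *prev = 0, *next;
  while (t)
    {
      next = t->chain;
      t->chain = prev;      /* point this node back at its predecessor */
      prev = t;
      t = next;
    }
  return prev;              /* old last element is the new head */
}
#endif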
3801
3802/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
3803 Also clear TREE_ASM_WRITTEN in all blocks. */
3804
3805static int
3806all_blocks (block, vector)
3807 tree block;
3808 tree *vector;
3809{
3810 int n_blocks = 1;
3811 tree subblocks;
3812
3813 TREE_ASM_WRITTEN (block) = 0;
3814 /* Record this block. */
3815 if (vector)
3816 vector[0] = block;
3817
3818 /* Record the subblocks, and their subblocks. */
3819 for (subblocks = BLOCK_SUBBLOCKS (block);
3820 subblocks; subblocks = BLOCK_CHAIN (subblocks))
3821 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
3822
3823 return n_blocks;
3824}
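/* Illustrative sketch, not part of function.c: all_blocks doubles as a
   counter (null vector) and a filler (non-null vector), which is how
   identify_blocks sizes its array before filling it.  Hypothetical types
   and names: */
#if 0
struct demo_blk { struct demo_blk *sub, *chain; };

static int
demo_all_blocks (struct demo_blk *b, struct demo_blk **vec)
{
  int n = 1;
  struct demo_blk *s;

  if (vec)
    vec[0] = b;                              /* record this block */
  for (s = b->sub; s; s = s->chain)
    n += demo_all_blocks (s, vec ? vec + n : 0);
  return n;                                  /* subtree size */
}
/* Usage: n = demo_all_blocks (top, 0); then allocate n slots and call
   demo_all_blocks (top, vec) to fill them.  */
#endif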
3825\f
3826/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3827 and initialize static variables for generating RTL for the statements
3828 of the function. */
3829
3830void
3831init_function_start (subr, filename, line)
3832 tree subr;
3833 char *filename;
3834 int line;
3835{
3836 char *junk;
3837
3838 init_stmt_for_function ();
3839
3840 cse_not_expected = ! optimize;
3841
3842 /* Caller save not needed yet. */
3843 caller_save_needed = 0;
3844
3845 /* No stack slots have been made yet. */
3846 stack_slot_list = 0;
3847
3848 /* There is no stack slot for handling nonlocal gotos. */
3849 nonlocal_goto_handler_slot = 0;
3850 nonlocal_goto_stack_level = 0;
3851
3852 /* No labels have been declared for nonlocal use. */
3853 nonlocal_labels = 0;
3854
3855 /* No function calls so far in this function. */
3856 function_call_count = 0;
3857
3858 /* No parm regs have been allocated.
3859 (This is important for output_inline_function.) */
3860 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3861
3862 /* Initialize the RTL mechanism. */
3863 init_emit ();
3864
3865 /* Initialize the queue of pending postincrement and postdecrements,
3866 and some other info in expr.c. */
3867 init_expr ();
3868
3869 /* We haven't done register allocation yet. */
3870 reg_renumber = 0;
3871
3872 init_const_rtx_hash_table ();
3873
3874 current_function_name = (*decl_printable_name) (subr, &junk);
3875
3876 /* Nonzero if this is a nested function that uses a static chain. */
3877
3878 current_function_needs_context
3879 = (decl_function_context (current_function_decl) != 0);
3880
3881 /* Set if a call to setjmp is seen. */
3882 current_function_calls_setjmp = 0;
3883
3884 /* Set if a call to longjmp is seen. */
3885 current_function_calls_longjmp = 0;
3886
3887 current_function_calls_alloca = 0;
3888 current_function_has_nonlocal_label = 0;
3889 current_function_contains_functions = 0;
3890
3891 current_function_returns_pcc_struct = 0;
3892 current_function_returns_struct = 0;
3893 current_function_epilogue_delay_list = 0;
3894 current_function_uses_const_pool = 0;
3895 current_function_uses_pic_offset_table = 0;
3896
3897 /* We have not yet needed to make a label to jump to for tail-recursion. */
3898 tail_recursion_label = 0;
3899
3900 /* We haven't had a need to make a save area for ap yet. */
3901
3902 arg_pointer_save_area = 0;
3903
3904 /* No stack slots allocated yet. */
3905 frame_offset = 0;
3906
3907 /* No SAVE_EXPRs in this function yet. */
3908 save_expr_regs = 0;
3909
3910 /* No RTL_EXPRs in this function yet. */
3911 rtl_expr_chain = 0;
3912
3913 /* We have not allocated any temporaries yet. */
3914 temp_slots = 0;
3915 temp_slot_level = 0;
3916
3917 /* Within function body, compute a type's size as soon as it is laid out. */
3918 immediate_size_expand++;
3919
3920 init_pending_stack_adjust ();
3921 inhibit_defer_pop = 0;
3922
3923 current_function_outgoing_args_size = 0;
3924
3925 /* Initialize the insn lengths. */
3926 init_insn_lengths ();
3927
3928 /* Prevent ever trying to delete the first instruction of a function.
3929 Also tell final how to output a linenum before the function prologue. */
3930 emit_line_note (filename, line);
3931
3932 /* Make sure first insn is a note even if we don't want linenums.
3933 This makes sure the first insn will never be deleted.
3934 Also, final expects a note to appear there. */
3935 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3936
3937 /* Set flags used by final.c. */
3938 if (aggregate_value_p (DECL_RESULT (subr)))
3939 {
3940#ifdef PCC_STATIC_STRUCT_RETURN
3941 if (flag_pcc_struct_return)
3942 current_function_returns_pcc_struct = 1;
3943 else
3944#endif
3945 current_function_returns_struct = 1;
3946 }
3947
3948 /* Warn if this value is an aggregate type,
3949 regardless of which calling convention we are using for it. */
3950 if (warn_aggregate_return
3951 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
3952 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
3953 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
3954 warning ("function returns an aggregate");
3955
3956 current_function_returns_pointer
3957 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
3958
3959 /* Indicate that we need to distinguish between the return value of the
3960 present function and the return value of a function being called. */
3961 rtx_equal_function_value_matters = 1;
3962
3963 /* Indicate that we have not instantiated virtual registers yet. */
3964 virtuals_instantiated = 0;
3965
3966 /* Indicate we have no need of a frame pointer yet. */
3967 frame_pointer_needed = 0;
3968
3969 /* By default assume not varargs. */
3970 current_function_varargs = 0;
3971}
3972
3973/* Indicate that the current function uses extra args
3974 not explicitly mentioned in the argument list in any fashion. */
3975
3976void
3977mark_varargs ()
3978{
3979 current_function_varargs = 1;
3980}
3981
3982/* Expand a call to __main at the beginning of a possible main function. */
3983
3984void
3985expand_main_function ()
3986{
3987#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
3988 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
3989 VOIDmode, 0);
3990#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
3991}
3992\f
3993/* Start the RTL for a new function, and set variables used for
3994 emitting RTL.
3995 SUBR is the FUNCTION_DECL node.
3996 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3997 the function's parameters, which must be run at any return statement. */
3998
3999void
4000expand_function_start (subr, parms_have_cleanups)
4001 tree subr;
4002 int parms_have_cleanups;
4003{
4004 register int i;
4005 tree tem;
4006 rtx last_ptr;
4007
4008 /* Make sure volatile mem refs aren't considered
4009 valid operands of arithmetic insns. */
4010 init_recog_no_volatile ();
4011
4012 /* If function gets a static chain arg, store it in the stack frame.
4013 Do this first, so it gets the first stack slot offset. */
4014 if (current_function_needs_context)
4015 {
4016 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4017 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4018 }
4019
4020 /* If the parameters of this function need cleaning up, get a label
4021 for the beginning of the code which executes those cleanups. This must
4022 be done before doing anything with return_label. */
4023 if (parms_have_cleanups)
4024 cleanup_label = gen_label_rtx ();
4025 else
4026 cleanup_label = 0;
4027
4028 /* Make the label for return statements to jump to, if this machine
4029 does not have a one-instruction return and uses an epilogue,
4030 or if it returns a structure, or if it has parm cleanups. */
4031#ifdef HAVE_return
4032 if (cleanup_label == 0 && HAVE_return
4033 && ! current_function_returns_pcc_struct
4034 && ! (current_function_returns_struct && ! optimize))
4035 return_label = 0;
4036 else
4037 return_label = gen_label_rtx ();
4038#else
4039 return_label = gen_label_rtx ();
4040#endif
4041
4042 /* Initialize rtx used to return the value. */
4043 /* Do this before assign_parms so that we copy the struct value address
4044 before any library calls that assign parms might generate. */
4045
4046 /* Decide whether to return the value in memory or in a register. */
4047 if (aggregate_value_p (DECL_RESULT (subr)))
4048 {
4049 /* Returning something that won't go in a register. */
4050 register rtx value_address;
4051
4052#ifdef PCC_STATIC_STRUCT_RETURN
4053 if (current_function_returns_pcc_struct)
4054 {
4055 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4056 value_address = assemble_static_space (size);
4057 }
4058 else
4059#endif
4060 {
4061 /* Expect to be passed the address of a place to store the value.
4062 If it is passed as an argument, assign_parms will take care of
4063 it. */
4064 if (struct_value_incoming_rtx)
4065 {
4066 value_address = gen_reg_rtx (Pmode);
4067 emit_move_insn (value_address, struct_value_incoming_rtx);
4068 }
4069 }
4070 if (value_address)
4071 DECL_RTL (DECL_RESULT (subr))
4072 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4073 value_address);
4074 }
4075 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4076 /* If return mode is void, this decl rtl should not be used. */
4077 DECL_RTL (DECL_RESULT (subr)) = 0;
4078 else if (parms_have_cleanups)
4079 /* If function will end with cleanup code for parms,
4080 compute the return values into a pseudo reg,
4081 which we will copy into the true return register
4082 after the cleanups are done. */
4083 DECL_RTL (DECL_RESULT (subr))
4084 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
4085 else
4086 /* Scalar, returned in a register. */
4087 {
4088#ifdef FUNCTION_OUTGOING_VALUE
4089 DECL_RTL (DECL_RESULT (subr))
4090 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4091#else
4092 DECL_RTL (DECL_RESULT (subr))
4093 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4094#endif
4095
4096 /* Mark this reg as the function's return value. */
4097 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4098 {
4099 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4100 /* Needed because we may need to move this to memory
4101 in case it's a named return value whose address is taken. */
4102 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4103 }
4104 }
4105
4106 /* Initialize rtx for parameters and local variables.
4107 In some cases this requires emitting insns. */
4108
4109 assign_parms (subr, 0);
4110
4111 /* The following was moved from init_function_start.
4112 The move is supposed to make sdb output more accurate. */
4113 /* Indicate the beginning of the function body,
4114 as opposed to parm setup. */
4115 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4116
4117 /* If doing stupid allocation, mark parms as born here. */
4118
4119 if (GET_CODE (get_last_insn ()) != NOTE)
4120 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4121 parm_birth_insn = get_last_insn ();
4122
4123 if (obey_regdecls)
4124 {
4125 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4126 use_variable (regno_reg_rtx[i]);
4127
4128 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4129 use_variable (current_function_internal_arg_pointer);
4130 }
4131
4132 /* Fetch static chain values for containing functions. */
4133 tem = decl_function_context (current_function_decl);
4134 /* If not doing stupid register allocation, then start off with the static
4135 chain pointer in a pseudo register. Otherwise, we use the stack
4136 address that was generated above. */
4137 if (tem && ! obey_regdecls)
4138 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4139 context_display = 0;
4140 while (tem)
4141 {
4142 tree rtlexp = make_node (RTL_EXPR);
4143
4144 RTL_EXPR_RTL (rtlexp) = last_ptr;
4145 context_display = tree_cons (tem, rtlexp, context_display);
4146 tem = decl_function_context (tem);
4147 if (tem == 0)
4148 break;
4149 /* Chain thru stack frames, assuming pointer to next lexical frame
4150 is found at the place we always store it. */
4151#ifdef FRAME_GROWS_DOWNWARD
4152 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4153#endif
4154 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4155 memory_address (Pmode, last_ptr)));
4156 }
4157
4158 /* After the display initializations is where the tail-recursion label
4159 should go, if we end up needing one. Ensure we have a NOTE here
4160 since some things (like trampolines) get placed before this. */
4161 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4162
4163 /* Evaluate now the sizes of any types declared among the arguments. */
4164 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4165 expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
4166
4167 /* Make sure there is a line number after the function entry setup code. */
4168 force_next_line_note ();
4169}
4170\f
4171/* Generate RTL for the end of the current function.
4172 FILENAME and LINE are the current position in the source file. */
4173
4174/* It is up to language-specific callers to do cleanups for parameters. */
4175
4176void
4177expand_function_end (filename, line)
4178 char *filename;
4179 int line;
4180{
4181 register int i;
4182 tree link;
4183
4184 static rtx initial_trampoline;
4185
4186#ifdef NON_SAVING_SETJMP
4187 /* Don't put any variables in registers if we call setjmp
4188 on a machine that fails to restore the registers. */
4189 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4190 {
4191 setjmp_protect (DECL_INITIAL (current_function_decl));
4192 setjmp_protect_args ();
4193 }
4194#endif
4195
4196 /* Save the argument pointer if a save area was made for it. */
4197 if (arg_pointer_save_area)
4198 {
4199 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4200 emit_insn_before (x, tail_recursion_reentry);
4201 }
4202
4203 /* Initialize any trampolines required by this function. */
4204 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4205 {
4206 tree function = TREE_PURPOSE (link);
4207 rtx context = lookup_static_chain (function);
4208 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4209 rtx seq;
4210
4211 /* First make sure this compilation has a template for
4212 initializing trampolines. */
4213 if (initial_trampoline == 0)
4214 {
4215 end_temporary_allocation ();
4216 initial_trampoline
4217 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4218 resume_temporary_allocation ();
4219 }
4220
4221 /* Generate insns to initialize the trampoline. */
4222 start_sequence ();
4223 tramp = change_address (initial_trampoline, BLKmode,
4224 round_trampoline_addr (XEXP (tramp, 0)));
4225 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4226 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4227 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4228 XEXP (DECL_RTL (function), 0), context);
4229 seq = get_insns ();
4230 end_sequence ();
4231
4232 /* Put those insns at entry to the containing function (this one). */
4233 emit_insns_before (seq, tail_recursion_reentry);
4234 }
4235 /* Clear the trampoline_list for the next function. */
4236 trampoline_list = 0;
4237
4238#if 0 /* I think unused parms are legitimate enough. */
4239 /* Warn about unused parms. */
4240 if (warn_unused)
4241 {
4242 rtx decl;
4243
4244 for (decl = DECL_ARGUMENTS (current_function_decl);
4245 decl; decl = TREE_CHAIN (decl))
4246 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4247 warning_with_decl (decl, "unused parameter `%s'");
4248 }
4249#endif
4250
4251 /* Delete handlers for nonlocal gotos if nothing uses them. */
4252 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4253 delete_handlers ();
4254
4255 /* End any sequences that failed to be closed due to syntax errors. */
4256 while (in_sequence_p ())
4257 end_sequence ();
4258
4259 /* Outside function body, can't compute type's actual size
4260 until next function's body starts. */
4261 immediate_size_expand--;
4262
4263 /* If doing stupid register allocation,
4264 mark register parms as dying here. */
4265
4266 if (obey_regdecls)
4267 {
4268 rtx tem;
4269 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4270 use_variable (regno_reg_rtx[i]);
4271
4272 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4273
4274 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4275 {
4276 use_variable (XEXP (tem, 0));
4277 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4278 }
4279
4280 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4281 use_variable (current_function_internal_arg_pointer);
4282 }
4283
4284 clear_pending_stack_adjust ();
4285 do_pending_stack_adjust ();
4286
4287 /* Mark the end of the function body.
4288 If control reaches this insn, the function can drop through
4289 without returning a value. */
4290 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4291
4292 /* Output a linenumber for the end of the function.
4293 SDB depends on this. */
4294 emit_line_note_force (filename, line);
4295
4296 /* Output the label for the actual return from the function,
4297 if one is expected. This happens either because a function epilogue
4298 is used instead of a return instruction, or because a return was done
4299 with a goto in order to run local cleanups, or because of pcc-style
4300 structure returning. */
4301
4302 if (return_label)
4303 emit_label (return_label);
4304
4305 /* If we had calls to alloca, and this machine needs
4306 an accurate stack pointer to exit the function,
4307 insert some code to save and restore the stack pointer. */
4308#ifdef EXIT_IGNORE_STACK
4309 if (! EXIT_IGNORE_STACK)
4310#endif
4311 if (current_function_calls_alloca)
4312 {
4313 rtx tem = 0;
4314
4315 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4316 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4317 }
4318
4319 /* If scalar return value was computed in a pseudo-reg,
4320 copy that to the hard return register. */
4321 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4322 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4323 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4324 >= FIRST_PSEUDO_REGISTER))
4325 {
4326 rtx real_decl_result;
4327
4328#ifdef FUNCTION_OUTGOING_VALUE
4329 real_decl_result
4330 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4331 current_function_decl);
4332#else
4333 real_decl_result
4334 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4335 current_function_decl);
4336#endif
4337 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4338 emit_move_insn (real_decl_result,
4339 DECL_RTL (DECL_RESULT (current_function_decl)));
4340 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4341 }
4342
4343 /* If returning a structure, arrange to return the address of the value
4344 in a place where debuggers expect to find it.
4345
4346 If returning a structure PCC style,
4347 the caller also depends on this value.
4348 And current_function_returns_pcc_struct is not necessarily set. */
4349 if (current_function_returns_struct
4350 || current_function_returns_pcc_struct)
4351 {
4352 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4353 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4354#ifdef FUNCTION_OUTGOING_VALUE
4355 rtx outgoing
4356 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4357 current_function_decl);
4358#else
4359 rtx outgoing
4360 = FUNCTION_VALUE (build_pointer_type (type),
4361 current_function_decl);
4362#endif
4363
4364 /* Mark this as a function return value so integrate will delete the
4365 assignment and USE below when inlining this function. */
4366 REG_FUNCTION_VALUE_P (outgoing) = 1;
4367
4368 emit_move_insn (outgoing, value_address);
4369 use_variable (outgoing);
4370 }
4371
4372 /* Output a return insn if we are using one.
4373 Otherwise, let the rtl chain end here, to drop through
4374 into the epilogue. */
4375
4376#ifdef HAVE_return
4377 if (HAVE_return)
4378 {
4379 emit_jump_insn (gen_return ());
4380 emit_barrier ();
4381 }
4382#endif
4383
4384 /* Fix up any gotos that jumped out to the outermost
4385 binding level of the function.
4386 Must follow emitting RETURN_LABEL. */
4387
4388 /* If you have any cleanups to do at this point,
4389 and they need to create temporary variables,
4390 then you will lose. */
4391 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4392}
4393\f
4394/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4395
4396static int *prologue;
4397static int *epilogue;
4398
4399/* Create an array that records the INSN_UIDs of INSNS (either a sequence
4400 or a single insn). */
4401
4402static int *
4403record_insns (insns)
4404 rtx insns;
4405{
4406 int *vec;
4407
4408 if (GET_CODE (insns) == SEQUENCE)
4409 {
4410 int len = XVECLEN (insns, 0);
4411 vec = (int *) oballoc ((len + 1) * sizeof (int));
4412 vec[len] = 0;
4413 while (--len >= 0)
4414 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4415 }
4416 else
4417 {
4418 vec = (int *) oballoc (2 * sizeof (int));
4419 vec[0] = INSN_UID (insns);
4420 vec[1] = 0;
4421 }
4422 return vec;
4423}
4424
4425/* Determine how many INSN_UIDs in VEC are part of INSN. */
4426
4427static int
4428contains (insn, vec)
4429 rtx insn;
4430 int *vec;
4431{
4432 register int i, j;
4433
4434 if (GET_CODE (insn) == INSN
4435 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4436 {
4437 int count = 0;
4438 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4439 for (j = 0; vec[j]; j++)
4440 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4441 count++;
4442 return count;
4443 }
4444 else
4445 {
4446 for (j = 0; vec[j]; j++)
4447 if (INSN_UID (insn) == vec[j])
4448 return 1;
4449 }
4450 return 0;
4451}
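/* Illustrative sketch, not part of function.c: the core membership test in
   contains, for the single-insn case (for a SEQUENCE the real code repeats
   this per element and returns the count).  Hypothetical name: */
#if 0
static int
demo_contains (int uid, const int *vec)
{
  int j;

  for (j = 0; vec[j]; j++)    /* vector is zero-terminated */
    if (uid == vec[j])
      return 1;
  return 0;
}
#endif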
4452
4453/* Generate the prologue and epilogue RTL if the machine supports it. Thread
4454 this into place with notes indicating where the prologue ends and where
4455 the epilogue begins. Update the basic block information when possible. */
4456
4457void
4458thread_prologue_and_epilogue_insns (f)
4459 rtx f;
4460{
4461#ifdef HAVE_prologue
4462 if (HAVE_prologue)
4463 {
4464 rtx head, seq, insn;
4465
4466 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4467 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4468 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4469 seq = gen_prologue ();
4470 head = emit_insn_after (seq, f);
4471
4472 /* Include the new prologue insns in the first block. Ignore them
4473 if they form a basic block unto themselves. */
4474 if (basic_block_head && n_basic_blocks
4475 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4476 basic_block_head[0] = NEXT_INSN (f);
4477
4478 /* Retain a map of the prologue insns. */
4479 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4480 }
4481 else
4482#endif
4483 prologue = 0;
4484
4485#ifdef HAVE_epilogue
4486 if (HAVE_epilogue)
4487 {
4488 rtx insn = get_last_insn ();
4489 rtx prev = prev_nonnote_insn (insn);
4490
4491 /* If we end with a BARRIER, we don't need an epilogue. */
4492 if (! (prev && GET_CODE (prev) == BARRIER))
4493 {
4494 rtx tail, seq;
4495
4496 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4497 the epilogue insns (this must include the jump insn that
4498 returns), USE insns at the end of a function, and a BARRIER. */
4499
4500 emit_barrier_after (insn);
4501
4502 /* Place the epilogue before the USE insns at the end of a
4503 function. */
4504 while (prev
4505 && GET_CODE (prev) == INSN
4506 && GET_CODE (PATTERN (prev)) == USE)
4507 {
4508 insn = PREV_INSN (prev);
4509 prev = prev_nonnote_insn (prev);
4510 }
4511
4512 seq = gen_epilogue ();
4513 tail = emit_jump_insn_after (seq, insn);
4514 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4515
4516 /* Include the new epilogue insns in the last block. Ignore
4517 them if they form a basic block unto themselves. */
4518 if (basic_block_end && n_basic_blocks
4519 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4520 basic_block_end[n_basic_blocks - 1] = tail;
4521
4522 /* Retain a map of the epilogue insns. */
4523 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4524 return;
4525 }
4526 }
4527#endif
4528 epilogue = 0;
4529}
4530
4531/* Reposition the prologue-end and epilogue-begin notes after instruction
4532 scheduling and delayed branch scheduling. */
4533
4534void
4535reposition_prologue_and_epilogue_notes (f)
4536 rtx f;
4537{
4538#if defined (HAVE_prologue) || defined (HAVE_epilogue)
4539 /* Reposition the prologue and epilogue notes. */
4540 if (n_basic_blocks)
4541 {
4542 rtx next, prev;
4543 int len;
4544
4545 if (prologue)
4546 {
4547 register rtx insn, note = 0;
4548
4549 /* Scan from the beginning until we reach the last prologue insn.
4550 We apparently can't depend on basic_block_{head,end} after
4551 reorg has run. */
4552 for (len = 0; prologue[len]; len++)
4553 ;
4554 for (insn = f; insn; insn = NEXT_INSN (insn))
4555 if (GET_CODE (insn) == NOTE)
4556 {
4557 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4558 note = insn;
4559 }
4560 else if ((len -= contains (insn, prologue)) == 0)
4561 {
4562 /* Find the prologue-end note if we haven't already, and
4563 move it to just after the last prologue insn. */
4564 if (note == 0)
4565 for (note = insn; note = NEXT_INSN (note);)
4566 if (GET_CODE (note) == NOTE
4567 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4568 break;
4569 next = NEXT_INSN (note);
4570 prev = PREV_INSN (note);
4571 if (prev)
4572 NEXT_INSN (prev) = next;
4573 if (next)
4574 PREV_INSN (next) = prev;
4575 add_insn_after (note, insn);
4576 break;
4577 }
4578 }
4579
4580 if (epilogue)
4581 {
4582 register rtx insn, note = 0;
4583
4584 /* Scan from the end until we reach the first epilogue insn.
4585 We apparently can't depend on basic_block_{head,end} after
4586 reorg has run. */
4587 for (len = 0; epilogue[len]; len++)
4588 ;
4589 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
4590 if (GET_CODE (insn) == NOTE)
4591 {
4592 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4593 note = insn;
4594 }
4595 else if ((len -= contains (insn, epilogue)) == 0)
4596 {
4597 /* Find the epilogue-begin note if we haven't already, and
4598 move it to just before the first epilogue insn. */
4599 if (note == 0)
4600 for (note = insn; note = PREV_INSN (note);)
4601 if (GET_CODE (note) == NOTE
4602 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4603 break;
4604 next = NEXT_INSN (note);
4605 prev = PREV_INSN (note);
4606 if (prev)
4607 NEXT_INSN (prev) = next;
4608 if (next)
4609 PREV_INSN (next) = prev;
4610 add_insn_after (note, PREV_INSN (insn));
4611 break;
4612 }
4613 }
4614 }
4615#endif /* HAVE_prologue or HAVE_epilogue */
4616}