/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"

/* Round a value down to the largest integer not greater than it that is
   a multiple of the required alignment.  Avoid using division in case
   the value is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest integer not less than VALUE that
   meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
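
/* Illustrative examples (not part of the original source): with
   ALIGN == 8, FLOOR_ROUND (-13, 8) yields -16 and CEIL_ROUND (13, 8)
   yields 16.  The bitwise forms behave correctly even for negative
   VALUEs, where `(VALUE / ALIGN) * ALIGN' could round toward zero
   instead of downward.  */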

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for that location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static void instantiate_decl ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};
348
349/* List of all temporaries allocated, both available and in use. */
350
351struct temp_slot *temp_slots;
352
353/* Current nesting level for temporaries. */
354
355int temp_slot_level;
356\f
357/* Pointer to chain of `struct function' for containing functions. */
358struct function *outer_function_chain;
359
360/* Given a function decl for a containing function,
361 return the `struct function' for it. */
362
363struct function *
364find_function_data (decl)
365 tree decl;
366{
367 struct function *p;
368 for (p = outer_function_chain; p; p = p->next)
369 if (p->decl == decl)
370 return p;
371 abort ();
372}
373
374/* Save the current context for compilation of a nested function.
375 This is called from language-specific code.
376 The caller is responsible for saving any language-specific status,
6dc42e49 377 since this function knows only about language-independent variables. */
6f086dfc
RS
378
379void
380push_function_context ()
381{
382 struct function *p = (struct function *) xmalloc (sizeof (struct function));
383
384 p->next = outer_function_chain;
385 outer_function_chain = p;
386
387 p->name = current_function_name;
388 p->decl = current_function_decl;
389 p->pops_args = current_function_pops_args;
390 p->returns_struct = current_function_returns_struct;
391 p->returns_pcc_struct = current_function_returns_pcc_struct;
392 p->needs_context = current_function_needs_context;
393 p->calls_setjmp = current_function_calls_setjmp;
394 p->calls_longjmp = current_function_calls_longjmp;
395 p->calls_alloca = current_function_calls_alloca;
396 p->has_nonlocal_label = current_function_has_nonlocal_label;
397 p->args_size = current_function_args_size;
398 p->pretend_args_size = current_function_pretend_args_size;
399 p->arg_offset_rtx = current_function_arg_offset_rtx;
400 p->uses_const_pool = current_function_uses_const_pool;
401 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
402 p->internal_arg_pointer = current_function_internal_arg_pointer;
403 p->max_parm_reg = max_parm_reg;
404 p->parm_reg_stack_loc = parm_reg_stack_loc;
405 p->outgoing_args_size = current_function_outgoing_args_size;
406 p->return_rtx = current_function_return_rtx;
407 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
408 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
409 p->nonlocal_labels = nonlocal_labels;
410 p->cleanup_label = cleanup_label;
411 p->return_label = return_label;
412 p->save_expr_regs = save_expr_regs;
413 p->stack_slot_list = stack_slot_list;
414 p->parm_birth_insn = parm_birth_insn;
415 p->frame_offset = frame_offset;
416 p->tail_recursion_label = tail_recursion_label;
417 p->tail_recursion_reentry = tail_recursion_reentry;
418 p->arg_pointer_save_area = arg_pointer_save_area;
419 p->rtl_expr_chain = rtl_expr_chain;
420 p->last_parm_insn = last_parm_insn;
421 p->context_display = context_display;
422 p->trampoline_list = trampoline_list;
423 p->function_call_count = function_call_count;
424 p->temp_slots = temp_slots;
425 p->temp_slot_level = temp_slot_level;
426 p->fixup_var_refs_queue = 0;
427
428 save_tree_status (p);
429 save_storage_status (p);
430 save_emit_status (p);
431 init_emit ();
432 save_expr_status (p);
433 save_stmt_status (p);
434}
435
436/* Restore the last saved context, at the end of a nested function.
437 This function is called from language-specific code. */
438
439void
440pop_function_context ()
441{
442 struct function *p = outer_function_chain;
443
444 outer_function_chain = p->next;
445
446 current_function_name = p->name;
447 current_function_decl = p->decl;
448 current_function_pops_args = p->pops_args;
449 current_function_returns_struct = p->returns_struct;
450 current_function_returns_pcc_struct = p->returns_pcc_struct;
451 current_function_needs_context = p->needs_context;
452 current_function_calls_setjmp = p->calls_setjmp;
453 current_function_calls_longjmp = p->calls_longjmp;
454 current_function_calls_alloca = p->calls_alloca;
455 current_function_has_nonlocal_label = p->has_nonlocal_label;
456 current_function_contains_functions = 1;
457 current_function_args_size = p->args_size;
458 current_function_pretend_args_size = p->pretend_args_size;
459 current_function_arg_offset_rtx = p->arg_offset_rtx;
460 current_function_uses_const_pool = p->uses_const_pool;
461 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
462 current_function_internal_arg_pointer = p->internal_arg_pointer;
463 max_parm_reg = p->max_parm_reg;
464 parm_reg_stack_loc = p->parm_reg_stack_loc;
465 current_function_outgoing_args_size = p->outgoing_args_size;
466 current_function_return_rtx = p->return_rtx;
467 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
468 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
469 nonlocal_labels = p->nonlocal_labels;
470 cleanup_label = p->cleanup_label;
471 return_label = p->return_label;
472 save_expr_regs = p->save_expr_regs;
473 stack_slot_list = p->stack_slot_list;
474 parm_birth_insn = p->parm_birth_insn;
475 frame_offset = p->frame_offset;
476 tail_recursion_label = p->tail_recursion_label;
477 tail_recursion_reentry = p->tail_recursion_reentry;
478 arg_pointer_save_area = p->arg_pointer_save_area;
479 rtl_expr_chain = p->rtl_expr_chain;
480 last_parm_insn = p->last_parm_insn;
481 context_display = p->context_display;
482 trampoline_list = p->trampoline_list;
483 function_call_count = p->function_call_count;
484 temp_slots = p->temp_slots;
485 temp_slot_level = p->temp_slot_level;
486
487 restore_tree_status (p);
488 restore_storage_status (p);
489 restore_expr_status (p);
490 restore_emit_status (p);
491 restore_stmt_status (p);
492
493 /* Finish doing put_var_into_stack for any of our variables
494 which became addressable during the nested function. */
495 {
496 struct var_refs_queue *queue = p->fixup_var_refs_queue;
497 for (; queue; queue = queue->next)
498 fixup_var_refs (queue->modified);
499 }
500
501 free (p);
502
503 /* Reset variables that have known state during rtx generation. */
504 rtx_equal_function_value_matters = 1;
505 virtuals_instantiated = 0;
506}
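
/* Usage sketch (illustrative, not part of the original source):
   language front ends bracket compilation of a nested function with

     push_function_context ();
     ... generate RTL for the nested FUNCTION_DECL ...
     pop_function_context ();

   saving and restoring any language-specific status themselves, as the
   comments above note.  */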
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
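
/* Example (illustrative): a word-sized local aligned according to its
   mode, following the pattern used elsewhere in this file:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Until virtual registers are instantiated, the returned MEM's address
   is an offset from virtual_stack_vars_rtx.  */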
600
601/* Assign a stack slot in a containing function.
602 First three arguments are same as in preceding function.
603 The last argument specifies the function to allocate in. */
604
605rtx
606assign_outer_stack_local (mode, size, align, function)
607 enum machine_mode mode;
608 int size;
609 int align;
610 struct function *function;
611{
612 register rtx x, addr;
613 int bigend_correction = 0;
614 int alignment;
615
616 /* Allocate in the memory associated with the function in whose frame
617 we are assigning. */
618 push_obstacks (function->function_obstack,
619 function->function_maybepermanent_obstack);
620
621 if (align == 0)
622 {
623 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
624 if (mode == BLKmode)
625 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
626 }
627 else if (align == -1)
628 {
629 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
630 size = CEIL_ROUND (size, alignment);
631 }
632 else
633 alignment = align / BITS_PER_UNIT;
634
6f086dfc
RS
635 /* Round frame offset to that alignment. */
636#ifdef FRAME_GROWS_DOWNWARD
637 frame_offset = FLOOR_ROUND (frame_offset, alignment);
638#else
639 frame_offset = CEIL_ROUND (frame_offset, alignment);
640#endif
641
642 /* On a big-endian machine, if we are allocating more space than we will use,
643 use the least significant bytes of those that are allocated. */
644#if BYTES_BIG_ENDIAN
645 if (mode != BLKmode)
646 bigend_correction = size - GET_MODE_SIZE (mode);
647#endif
648
649#ifdef FRAME_GROWS_DOWNWARD
650 function->frame_offset -= size;
651#endif
652 addr = plus_constant (virtual_stack_vars_rtx,
653 function->frame_offset + bigend_correction);
654#ifndef FRAME_GROWS_DOWNWARD
655 function->frame_offset += size;
656#endif
657
658 x = gen_rtx (MEM, mode, addr);
659
660 function->stack_slot_list
661 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
662
663 pop_obstacks ();
664
665 return x;
666}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
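
/* Typical use (illustrative): allocate a temporary for the duration of
   one statement and release it afterward:

     rtx temp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
     ... emit code that stores into and reads TEMP ...
     free_temp_slots ();

   A later assign_stack_temp with the same mode and a size that fits may
   then reuse the same stack slot.  */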
\f
/* If X could be a reference to a temporary slot, mark that slot as belonging
   to the level one higher.  If X matched one of our slots, just mark that
   one.  Otherwise, we can't easily predict which it is, so upgrade all of
   them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)
      {
        p->level--;
        return;
      }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->in_use = 0;
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  /* For GNU C++, we must allow a sequence to be emitted anywhere in
     the level where the sequence was started.  By not changing levels
     when the compiler is inside a sequence, the temporaries for the
     sequence will not unwittingly conflict with the temporaries for
     other sequences and/or code at that level.  */
  if (in_sequence_p ())
    return;

  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  /* See comment in push_temp_slots about why we don't change levels
     in sequences.  */
  if (in_sequence_p ())
    return;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level)
      p->in_use = 0;

  temp_slot_level--;
}
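
/* Illustrative nesting (not part of the original source):

     push_temp_slots ();                        (enter level N+1)
     t = assign_stack_temp (SImode, 4, 0);      (slot lives at level N+1)
     pop_temp_slots ();                         (slot is freed)

   Calling preserve_temp_slots (t) before the pop would instead move the
   slot up to level N so that it survives the pop.  */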
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (GET_MODE (reg),
                                        GET_MODE_SIZE (GET_MODE (reg)),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (GET_MODE (reg),
                                  GET_MODE_SIZE (GET_MODE (reg)),
                                  0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);
      temp
        = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
\f
static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* REPLACEMENTS is a pointer to a list of the above structures and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
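
/* For illustration: if an insn has two operands that are required to
   match and both are the same (mem ...) containing VAR, both lookups
   here return the same struct fixup_replacement, so both occurrences
   end up replaced by one identical rtx, as the comment above the
   structure requires.  */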

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
          || GET_CODE (insn) == JUMP_INSN)
        {
          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          if (toplevel
              && GET_CODE (PATTERN (insn)) == SET
              && SET_DEST (PATTERN (insn)) == var
              && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              next = delete_insn (insn);
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              struct fixup_replacement *replacements = 0;

              fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      /* We can not separate USE insns from the CALL_INSN
                         that they belong to.  If this is a CALL_INSN, insert
                         the move insn before the USE insns preceding it
                         instead of immediately before the insn.  */
                      if (GET_CODE (insn) == CALL_INSN)
                        {
                          insert_before = insn;
                          while (GET_CODE (PREV_INSN (insert_before)) == INSN
                                 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
                            insert_before = PREV_INSN (insert_before);
                        }
                      else
                        insert_before = insn;

                      emit_insn_before (gen_move_insn (replacements->new,
                                                       replacements->old),
                                        insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register.  See if the rtx expression
   at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, loc, insn, replacements)
     register rtx var;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register, we can leave things
             the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            tem = fixup_memory_subreg (tem, insn, 1);
          tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  int offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                  offset = (GET_MODE_SIZE (is_mode)
                            - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                  pos %= GET_MODE_BITSIZE (wanted_mode);

                  newmem = gen_rtx (MEM, wanted_mode,
                                    plus_constant (XEXP (tem, 0), offset));
                  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                  /* Make the change and see if the insn remains valid.  */
                  INSN_CODE (insn) = -1;
                  XEXP (x, 0) = newmem;
                  XEXP (x, 2) = GEN_INT (pos);

                  if (recog_memoized (insn) >= 0)
                    return;

                  /* Otherwise, restore old position.  XEXP (x, 0) will be
                     restored later.  */
                  XEXP (x, 2) = old_pos;
                }
            }

          /* If we get here, the bitfield extract insn can't accept a memory
             reference.  Copy the input into a register.  */

          tem1 = gen_reg_rtx (GET_MODE (tem));
          emit_insn_before (gen_move_insn (tem1, tem), insn);
          XEXP (x, 0) = tem1;
          return;
        }
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
        {
          /* If this SUBREG makes VAR wider, it has become a paradoxical
             SUBREG with VAR in memory, but these aren't allowed at this
             stage of the compilation.  So load VAR into a pseudo and take
             a SUBREG of that pseudo.  */
          if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
            {
              replacement = find_replacement (replacements, var);
              if (replacement->new == 0)
                replacement->new = gen_reg_rtx (GET_MODE (var));
              SUBREG_REG (x) = replacement->new;
              return;
            }

          /* See if we have already found a replacement for this SUBREG.
             If so, use it.  Otherwise, make a MEM and see if the insn
             is recognized.  If not, or if we should force MEM into a register,
             make a pseudo for this SUBREG.  */
          replacement = find_replacement (replacements, x);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
          || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
          || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
         insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
          && SUBREG_REG (SET_DEST (x)) == var
          && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
              > GET_MODE_SIZE (GET_MODE (var))))
        {
          SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
          emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
                                                            tem)),
                           insn);
          break;
        }

      {
        rtx dest = SET_DEST (x);
        rtx src = SET_SRC (x);
        rtx outerdest = dest;

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == SIGN_EXTRACT
               || GET_CODE (dest) == ZERO_EXTRACT)
          dest = XEXP (dest, 0);

        if (GET_CODE (src) == SUBREG)
          src = XEXP (src, 0);

        /* If VAR does not appear at the top level of the SET
           just scan the lower levels of the tree.  */

        if (src != var && dest != var)
          break;

        /* We will need to rerecognize this insn.  */
        INSN_CODE (insn) = -1;

#ifdef HAVE_insv
        if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
          {
            /* Since this case will return, ensure we fixup all the
               operands here.  */
            fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
            fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
            fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);

            tem = XEXP (outerdest, 0);

            /* Clean up (SUBREG:SI (MEM:mode ...) 0)
               that may appear inside a ZERO_EXTRACT.
               This was legitimate when the MEM was a REG.  */
            if (GET_CODE (tem) == SUBREG
                && SUBREG_REG (tem) == var)
              tem = fixup_memory_subreg (tem, insn, 1);
            else
              tem = fixup_stack_1 (tem, insn);

            if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
                && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
                && ! mode_dependent_address_p (XEXP (tem, 0))
                && ! MEM_VOLATILE_P (tem))
              {
                enum machine_mode wanted_mode
                  = insn_operand_mode[(int) CODE_FOR_insv][0];
                enum machine_mode is_mode = GET_MODE (tem);
                int width = INTVAL (XEXP (outerdest, 1));
                int pos = INTVAL (XEXP (outerdest, 2));

                /* If we have a narrower mode, we can do something.  */
                if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                  {
                    int offset = pos / BITS_PER_UNIT;
                    rtx old_pos = XEXP (outerdest, 2);
                    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                    offset = (GET_MODE_SIZE (is_mode)
                              - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                    pos %= GET_MODE_BITSIZE (wanted_mode);

                    newmem = gen_rtx (MEM, wanted_mode,
                                      plus_constant (XEXP (tem, 0), offset));
                    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                    /* Make the change and see if the insn remains valid.  */
                    INSN_CODE (insn) = -1;
                    XEXP (outerdest, 0) = newmem;
                    XEXP (outerdest, 2) = GEN_INT (pos);

                    if (recog_memoized (insn) >= 0)
                      return;

                    /* Otherwise, restore old position.  XEXP (x, 0) will be
                       restored later.  */
                    XEXP (outerdest, 2) = old_pos;
                  }
              }

            /* If we get here, the bit-field store doesn't allow memory
               or isn't located at a constant position.  Load the value into
               a register, do the store, and put it back into memory.  */

            tem1 = gen_reg_rtx (GET_MODE (tem));
            emit_insn_before (gen_move_insn (tem1, tem), insn);
            emit_insn_after (gen_move_insn (tem, tem1), insn);
            XEXP (outerdest, 0) = tem1;
            return;
          }
#endif

        /* STRICT_LOW_PART is a no-op on memory references
           and it can cause combinations to be unrecognizable,
           so eliminate it.  */

        if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
          SET_DEST (x) = XEXP (SET_DEST (x), 0);

        /* A valid insn to copy VAR into or out of a register
           must be left alone, to avoid an infinite loop here.
           If the reference to VAR is by a subreg, fix that up,
           since SUBREG is not valid for a memref.
           Also fix up the address of the stack slot.  */

        if ((SET_SRC (x) == var
             || (GET_CODE (SET_SRC (x)) == SUBREG
                 && SUBREG_REG (SET_SRC (x)) == var))
            && (GET_CODE (SET_DEST (x)) == REG
                || (GET_CODE (SET_DEST (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            replacement = find_replacement (replacements, SET_SRC (x));
            if (replacement->new)
              {
                SET_SRC (x) = replacement->new;
                return;
              }
            else if (GET_CODE (SET_SRC (x)) == SUBREG)
              SET_SRC (x) = replacement->new
                = fixup_memory_subreg (SET_SRC (x), insn, 0);
            else
              SET_SRC (x) = replacement->new
                = fixup_stack_1 (SET_SRC (x), insn);
            return;
          }

        if ((SET_DEST (x) == var
             || (GET_CODE (SET_DEST (x)) == SUBREG
                 && SUBREG_REG (SET_DEST (x)) == var))
            && (GET_CODE (SET_SRC (x)) == REG
                || (GET_CODE (SET_SRC (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
            && recog_memoized (insn) >= 0)
          {
            if (GET_CODE (SET_DEST (x)) == SUBREG)
              SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
            else
              SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
            return;
          }

        /* Otherwise, storing into VAR must be handled specially
           by storing into a temporary and copying that into VAR
           with a new insn after this one.  */

        if (dest == var)
          {
            rtx temp;
            rtx fixeddest;
            tem = SET_DEST (x);
            /* STRICT_LOW_PART can be discarded, around a MEM.  */
            if (GET_CODE (tem) == STRICT_LOW_PART)
              tem = XEXP (tem, 0);
            /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
            if (GET_CODE (tem) == SUBREG)
              fixeddest = fixup_memory_subreg (tem, insn, 0);
            else
              fixeddest = fixup_stack_1 (tem, insn);

            temp = gen_reg_rtx (GET_MODE (tem));
            emit_insn_after (gen_move_insn (fixeddest, temp), insn);
            SET_DEST (x) = temp;
          }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
        }
    }
}
\f
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

#if BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
             - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
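
/* Example (illustrative, little-endian): (subreg:QI (mem:SI ADDR) 0)
   becomes (mem:QI ADDR); a nonzero SUBREG_WORD adds the corresponding
   byte offset to ADDR, and on big-endian machines the offset is further
   corrected as computed above.  */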

/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
walk_fixup_memory_subreg (x, insn)
     register rtx x;
     rtx insn;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, 0);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j)
              = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
\f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
            || GET_CODE (insn) == JUMP_INSN)
          fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif

/* For each memory ref within X, if it refers to a stack slot
   with an out of range displacement, put the address in a temp register
   (emitting new insns before INSN to load these registers)
   and alter the memory ref to use that register.
   Replace each such MEM rtx with a copy, to avoid clobberage.  */

static rtx
fixup_stack_1 (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register char *fmt;

  if (code == MEM)
    {
      register rtx ad = XEXP (x, 0);
      /* If we have address of a stack slot but it's not valid
         (displacement is too large), compute the sum in a register.  */
      if (GET_CODE (ad) == PLUS
          && GET_CODE (XEXP (ad, 0)) == REG
          && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
          && GET_CODE (XEXP (ad, 1)) == CONST_INT)
        {
          rtx temp, seq;
          if (memory_address_p (GET_MODE (x), ad))
            return x;

          start_sequence ();
          temp = copy_to_reg (ad);
          seq = gen_sequence ();
          end_sequence ();
          emit_insn_before (seq, insn);
          return change_address (x, VOIDmode, temp);
        }
      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
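
/* For illustration: if X contains (mem:SI (plus (reg virtual-stack-vars)
   (const_int 4096))) and that displacement is out of range for the
   machine's addressing modes, the sum is first computed into a new
   pseudo register and the MEM is rewritten to address through it.  */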
1637\f
1638/* Optimization: a bit-field instruction whose field
1639 happens to be a byte or halfword in memory
1640 can be changed to a move instruction.
1641
1642 We call here when INSN is an insn to examine or store into a bit-field.
1643 BODY is the SET-rtx to be altered.
1644
1645 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1646 (Currently this is called only from function.c, and EQUIV_MEM
1647 is always 0.) */
1648
1649static void
1650optimize_bit_field (body, insn, equiv_mem)
1651 rtx body;
1652 rtx insn;
1653 rtx *equiv_mem;
1654{
1655 register rtx bitfield;
1656 int destflag;
1657 rtx seq = 0;
1658 enum machine_mode mode;
1659
1660 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1661 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1662 bitfield = SET_DEST (body), destflag = 1;
1663 else
1664 bitfield = SET_SRC (body), destflag = 0;
1665
1666 /* First check that the field being stored has constant size and position
1667 and is in fact a byte or halfword suitably aligned. */
1668
1669 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1670 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1671 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1672 != BLKmode)
1673 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1674 {
1675 register rtx memref = 0;
1676
1677 /* Now check that the containing word is memory, not a register,
1678 and that it is safe to change the machine mode. */
1679
1680 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1681 memref = XEXP (bitfield, 0);
1682 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1683 && equiv_mem != 0)
1684 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1685 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1686 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1687 memref = SUBREG_REG (XEXP (bitfield, 0));
1688 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1689 && equiv_mem != 0
1690 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1691 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1692
1693 if (memref
1694 && ! mode_dependent_address_p (XEXP (memref, 0))
1695 && ! MEM_VOLATILE_P (memref))
1696 {
1697 /* Now adjust the address, first for any subreg'ing
1698 that we are now getting rid of,
1699 and then for which byte of the word is wanted. */
1700
1701 register int offset = INTVAL (XEXP (bitfield, 2));
1702 /* Adjust OFFSET to count bits from low-address byte. */
1703#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1704 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1705 - offset - INTVAL (XEXP (bitfield, 1)));
1706#endif
1707 /* Adjust OFFSET to count bytes from low-address byte. */
1708 offset /= BITS_PER_UNIT;
1709 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1710 {
1711 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1712#if BYTES_BIG_ENDIAN
1713 offset -= (MIN (UNITS_PER_WORD,
1714 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1715 - MIN (UNITS_PER_WORD,
1716 GET_MODE_SIZE (GET_MODE (memref))));
1717#endif
1718 }
1719
1720 memref = change_address (memref, mode,
1721 plus_constant (XEXP (memref, 0), offset));
1722
1723 /* Store this memory reference where
1724 we found the bit field reference. */
1725
1726 if (destflag)
1727 {
1728 validate_change (insn, &SET_DEST (body), memref, 1);
1729 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1730 {
1731 rtx src = SET_SRC (body);
1732 while (GET_CODE (src) == SUBREG
1733 && SUBREG_WORD (src) == 0)
1734 src = SUBREG_REG (src);
1735 if (GET_MODE (src) != GET_MODE (memref))
1736 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1737 validate_change (insn, &SET_SRC (body), src, 1);
1738 }
1739 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1740 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1741 /* This shouldn't happen because anything that didn't have
1742 one of these modes should have got converted explicitly
1743 and then referenced through a subreg.
1744 This is so because the original bit-field was
1745 handled by agg_mode and so its tree structure had
1746 the same mode that memref now has. */
1747 abort ();
1748 }
1749 else
1750 {
1751 rtx dest = SET_DEST (body);
1752
1753 while (GET_CODE (dest) == SUBREG
1754 && SUBREG_WORD (dest) == 0)
1755 dest = SUBREG_REG (dest);
1756
1757 validate_change (insn, &SET_DEST (body), dest, 1);
1758
1759 if (GET_MODE (dest) == GET_MODE (memref))
1760 validate_change (insn, &SET_SRC (body), memref, 1);
1761 else
1762 {
1763 /* Convert the mem ref to the destination mode. */
1764 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1765
1766 start_sequence ();
1767 convert_move (newreg, memref,
1768 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1769 seq = get_insns ();
1770 end_sequence ();
1771
1772 validate_change (insn, &SET_SRC (body), newreg, 1);
1773 }
1774 }
1775
1776 /* See if we can convert this extraction or insertion into
1777 a simple move insn. We might not be able to do so if this
1778 was, for example, part of a PARALLEL.
1779
1780 If we succeed, write out any needed conversions. If we fail,
1781 it is hard to guess why we failed, so don't do anything
1782 special; just let the optimization be suppressed. */
1783
1784 if (apply_change_group () && seq)
1785 emit_insns_before (seq, insn);
1786 }
1787 }
1788}
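/* A sketch of the rewrite this performs (assuming the bit numbering
   matches the byte order): an aligned byte-sized extraction such as

	(set (reg:SI r) (zero_extract (mem:SI a) (const_int 8) (const_int 8)))

   has its source replaced by the plain byte reference (mem:QI a+1);
   since SImode is wider than QImode here, a zero-extending
   convert_move into a new pseudo is emitted first and that pseudo
   becomes the new source.  */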
1789\f
1790/* These routines are responsible for converting virtual register references
1791 to the actual hard register references once RTL generation is complete.
1792
1793 The following four variables are used for communication between the
1794 routines. They contain the offsets of the virtual registers from their
1795 respective hard registers. */
1796
1797static int in_arg_offset;
1798static int var_offset;
1799static int dynamic_offset;
1800static int out_arg_offset;
1801
1802/* In most machines, the stack pointer register is equivalent to the bottom
1803 of the stack. */
1804
1805#ifndef STACK_POINTER_OFFSET
1806#define STACK_POINTER_OFFSET 0
1807#endif
1808
1809/* If not defined, pick an appropriate default for the offset of dynamically
1810 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1811 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1812
1813#ifndef STACK_DYNAMIC_OFFSET
1814
1815#ifdef ACCUMULATE_OUTGOING_ARGS
1816/* The bottom of the stack points to the actual arguments. If
1817 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1818 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1819 stack space for register parameters is not pushed by the caller, but
1820 rather part of the fixed stack areas and hence not included in
1821 `current_function_outgoing_args_size'. Nevertheless, we must allow
1822 for it when allocating stack dynamic objects. */
1823
1824#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1825#define STACK_DYNAMIC_OFFSET(FNDECL) \
1826(current_function_outgoing_args_size \
1827 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1828
1829#else
1830#define STACK_DYNAMIC_OFFSET(FNDECL) \
1831(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1832#endif
1833
1834#else
1835#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1836#endif
1837#endif
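/* Worked instance (all numbers hypothetical): with
   ACCUMULATE_OUTGOING_ARGS, REG_PARM_STACK_SPACE defined, and
   OUTGOING_REG_PARM_STACK_SPACE not defined, a function with 16 bytes
   of outgoing arguments and 24 bytes of register-parameter space gets

	STACK_DYNAMIC_OFFSET (fndecl) == 16 + 24 + STACK_POINTER_OFFSET

   so dynamically allocated objects start above both areas.  */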
1838
1839/* Pass through the INSNS of function FNDECL and convert virtual register
1840 references to hard register references. */
1841
1842void
1843instantiate_virtual_regs (fndecl, insns)
1844 tree fndecl;
1845 rtx insns;
1846{
1847 rtx insn;
1848
1849 /* Compute the offsets to use for this function. */
1850 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1851 var_offset = STARTING_FRAME_OFFSET;
1852 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1853 out_arg_offset = STACK_POINTER_OFFSET;
1854
1855 /* Scan all variables and parameters of this function. For each that is
1856 in memory, instantiate all virtual registers if the result is a valid
1857 address. If not, we do it later. That will handle most uses of virtual
1858 regs on many machines. */
1859 instantiate_decls (fndecl, 1);
1860
1861 /* Initialize recognition, indicating that volatile is OK. */
1862 init_recog ();
1863
1864 /* Scan through all the insns, instantiating every virtual register still
1865 present. */
1866 for (insn = insns; insn; insn = NEXT_INSN (insn))
1867 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1868 || GET_CODE (insn) == CALL_INSN)
1869 {
1870 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
 1871 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
 1872 }
1873
1874 /* Now instantiate the remaining register equivalences for debugging info.
1875 These will not be valid addresses. */
1876 instantiate_decls (fndecl, 0);
1877
1878 /* Indicate that, from now on, assign_stack_local should use
1879 frame_pointer_rtx. */
1880 virtuals_instantiated = 1;
1881}
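/* For example (var_offset is whatever STARTING_FRAME_OFFSET yields;
   the 8 is hypothetical), a reference such as

	(mem:SI (plus (reg virtual-stack-vars) (const_int 8)))

   is rewritten by this pass into

	(mem:SI (plus (reg fp) (const_int var_offset + 8)))

   with no new insns when the resulting address is valid.  */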
1882
1883/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1884 all virtual registers in their DECL_RTL's.
1885
1886 If VALID_ONLY, do this only if the resulting address is still valid.
1887 Otherwise, always do it. */
1888
1889static void
1890instantiate_decls (fndecl, valid_only)
1891 tree fndecl;
1892 int valid_only;
1893{
1894 tree decl;
1895
 1896 if (DECL_INLINE (fndecl))
1897 /* When compiling an inline function, the obstack used for
1898 rtl allocation is the maybepermanent_obstack. Calling
1899 `resume_temporary_allocation' switches us back to that
1900 obstack while we process this function's parameters. */
1901 resume_temporary_allocation ();
1902
1903 /* Process all parameters of the function. */
1904 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1905 {
1906 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
1907 valid_only);
1908 instantiate_decl (DECL_INCOMING_RTL (decl),
1909 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
1910 }
1911
1912 /* Now process all variables defined in the function or its subblocks. */
1913 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1914
 1915 if (DECL_INLINE (fndecl))
1916 {
1917 /* Save all rtl allocated for this function by raising the
1918 high-water mark on the maybepermanent_obstack. */
1919 preserve_data ();
1920 /* All further rtl allocation is now done in the current_obstack. */
1921 rtl_in_current_obstack ();
1922 }
1923}
1924
1925/* Subroutine of instantiate_decls: Process all decls in the given
1926 BLOCK node and all its subblocks. */
1927
1928static void
1929instantiate_decls_1 (let, valid_only)
1930 tree let;
1931 int valid_only;
1932{
1933 tree t;
1934
1935 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1936 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
1937 valid_only);
1938
1939 /* Process all subblocks. */
1940 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1941 instantiate_decls_1 (t, valid_only);
1942}
1943
 1944/* Subroutine of the preceding procedures: Given RTL representing a
1945 decl and the size of the object, do any instantiation required.
1946
1947 If VALID_ONLY is non-zero, it means that the RTL should only be
1948 changed if the new address is valid. */
1949
1950static void
1951instantiate_decl (x, size, valid_only)
1952 rtx x;
1953 int size;
1954 int valid_only;
1955{
1956 enum machine_mode mode;
1957 rtx addr;
1958
1959 /* If this is not a MEM, no need to do anything. Similarly if the
1960 address is a constant or a register that is not a virtual register. */
1961
1962 if (x == 0 || GET_CODE (x) != MEM)
1963 return;
1964
1965 addr = XEXP (x, 0);
1966 if (CONSTANT_P (addr)
1967 || (GET_CODE (addr) == REG
1968 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1969 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1970 return;
1971
1972 /* If we should only do this if the address is valid, copy the address.
1973 We need to do this so we can undo any changes that might make the
1974 address invalid. This copy is unfortunate, but probably can't be
1975 avoided. */
1976
1977 if (valid_only)
1978 addr = copy_rtx (addr);
1979
1980 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
1981
1982 if (! valid_only)
1983 return;
1984
1985 /* Now verify that the resulting address is valid for every integer or
1986 floating-point mode up to and including SIZE bytes long. We do this
1987 since the object might be accessed in any mode and frame addresses
1988 are shared. */
1989
1990 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1991 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
1992 mode = GET_MODE_WIDER_MODE (mode))
1993 if (! memory_address_p (mode, addr))
1994 return;
1995
1996 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1997 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
1998 mode = GET_MODE_WIDER_MODE (mode))
1999 if (! memory_address_p (mode, addr))
2000 return;
2001
2002 /* Otherwise, put back the address, now that we have updated it and we
2003 know it is valid. */
2004
2005 XEXP (x, 0) = addr;
2006}
2007\f
2008/* Given a pointer to a piece of rtx and an optional pointer to the
2009 containing object, instantiate any virtual registers present in it.
2010
2011 If EXTRA_INSNS, we always do the replacement and generate
 2012 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2013 is not valid.
2014
2015 Return 1 if we either had nothing to do or if we were able to do the
2016 needed replacement. Return 0 otherwise; we only return zero if
2017 EXTRA_INSNS is zero.
2018
2019 We first try some simple transformations to avoid the creation of extra
2020 pseudos. */
2021
2022static int
2023instantiate_virtual_regs_1 (loc, object, extra_insns)
2024 rtx *loc;
2025 rtx object;
2026 int extra_insns;
2027{
2028 rtx x;
2029 RTX_CODE code;
2030 rtx new = 0;
2031 int offset;
2032 rtx temp;
2033 rtx seq;
2034 int i, j;
2035 char *fmt;
2036
2037 /* Re-start here to avoid recursion in common cases. */
2038 restart:
2039
2040 x = *loc;
2041 if (x == 0)
2042 return 1;
2043
2044 code = GET_CODE (x);
2045
2046 /* Check for some special cases. */
2047 switch (code)
2048 {
2049 case CONST_INT:
2050 case CONST_DOUBLE:
2051 case CONST:
2052 case SYMBOL_REF:
2053 case CODE_LABEL:
2054 case PC:
2055 case CC0:
2056 case ASM_INPUT:
2057 case ADDR_VEC:
2058 case ADDR_DIFF_VEC:
2059 case RETURN:
2060 return 1;
2061
2062 case SET:
2063 /* We are allowed to set the virtual registers. This means that
 2064 the actual register should receive the source minus the
2065 appropriate offset. This is used, for example, in the handling
2066 of non-local gotos. */
2067 if (SET_DEST (x) == virtual_incoming_args_rtx)
2068 new = arg_pointer_rtx, offset = - in_arg_offset;
2069 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2070 new = frame_pointer_rtx, offset = - var_offset;
2071 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2072 new = stack_pointer_rtx, offset = - dynamic_offset;
2073 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2074 new = stack_pointer_rtx, offset = - out_arg_offset;
2075
2076 if (new)
2077 {
2078 /* The only valid sources here are PLUS or REG. Just do
2079 the simplest possible thing to handle them. */
2080 if (GET_CODE (SET_SRC (x)) != REG
2081 && GET_CODE (SET_SRC (x)) != PLUS)
2082 abort ();
2083
2084 start_sequence ();
2085 if (GET_CODE (SET_SRC (x)) != REG)
 2086 temp = force_operand (SET_SRC (x), NULL_RTX);
2087 else
2088 temp = SET_SRC (x);
 2089 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2090 seq = get_insns ();
2091 end_sequence ();
2092
2093 emit_insns_before (seq, object);
2094 SET_DEST (x) = new;
2095
2096 if (!validate_change (object, &SET_SRC (x), temp, 0)
2097 || ! extra_insns)
2098 abort ();
2099
2100 return 1;
2101 }
2102
2103 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2104 loc = &SET_SRC (x);
2105 goto restart;
2106
2107 case PLUS:
2108 /* Handle special case of virtual register plus constant. */
2109 if (CONSTANT_P (XEXP (x, 1)))
2110 {
2111 rtx old;
2112
2113 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2114 if (GET_CODE (XEXP (x, 0)) == PLUS)
2115 {
2116 rtx inner = XEXP (XEXP (x, 0), 0);
2117
2118 if (inner == virtual_incoming_args_rtx)
2119 new = arg_pointer_rtx, offset = in_arg_offset;
2120 else if (inner == virtual_stack_vars_rtx)
2121 new = frame_pointer_rtx, offset = var_offset;
2122 else if (inner == virtual_stack_dynamic_rtx)
2123 new = stack_pointer_rtx, offset = dynamic_offset;
2124 else if (inner == virtual_outgoing_args_rtx)
2125 new = stack_pointer_rtx, offset = out_arg_offset;
2126 else
2127 {
2128 loc = &XEXP (x, 0);
2129 goto restart;
2130 }
2131
2132 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2133 extra_insns);
2134 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2135 }
2136
2137 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2138 new = arg_pointer_rtx, offset = in_arg_offset;
2139 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2140 new = frame_pointer_rtx, offset = var_offset;
2141 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2142 new = stack_pointer_rtx, offset = dynamic_offset;
2143 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2144 new = stack_pointer_rtx, offset = out_arg_offset;
2145 else
2146 {
2147 /* We know the second operand is a constant. Unless the
 2148 first operand is a REG (which has already been checked),
2149 it needs to be checked. */
2150 if (GET_CODE (XEXP (x, 0)) != REG)
2151 {
2152 loc = &XEXP (x, 0);
2153 goto restart;
2154 }
2155 return 1;
2156 }
2157
2158 old = XEXP (x, 0);
2159 XEXP (x, 0) = new;
2160 new = plus_constant (XEXP (x, 1), offset);
2161
2162 /* If the new constant is zero, try to replace the sum with its
2163 first operand. */
2164 if (new == const0_rtx
2165 && validate_change (object, loc, XEXP (x, 0), 0))
2166 return 1;
2167
2168 /* Next try to replace constant with new one. */
2169 if (!validate_change (object, &XEXP (x, 1), new, 0))
2170 {
2171 if (! extra_insns)
2172 {
2173 XEXP (x, 0) = old;
2174 return 0;
2175 }
2176
2177 /* Otherwise copy the new constant into a register and replace
2178 constant with that register. */
2179 temp = gen_reg_rtx (Pmode);
2180 if (validate_change (object, &XEXP (x, 1), temp, 0))
2181 emit_insn_before (gen_move_insn (temp, new), object);
2182 else
2183 {
2184 /* If that didn't work, replace this expression with a
2185 register containing the sum. */
2186
2187 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2188 XEXP (x, 0) = old;
2189
2190 start_sequence ();
 2191 temp = force_operand (new, NULL_RTX);
2192 seq = get_insns ();
2193 end_sequence ();
2194
2195 emit_insns_before (seq, object);
2196 if (! validate_change (object, loc, temp, 0)
2197 && ! validate_replace_rtx (x, temp, object))
2198 abort ();
2199 }
2200 }
2201
2202 return 1;
2203 }
2204
2205 /* Fall through to generic two-operand expression case. */
2206 case EXPR_LIST:
2207 case CALL:
2208 case COMPARE:
2209 case MINUS:
2210 case MULT:
2211 case DIV: case UDIV:
2212 case MOD: case UMOD:
2213 case AND: case IOR: case XOR:
2214 case LSHIFT: case ASHIFT: case ROTATE:
2215 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2216 case NE: case EQ:
2217 case GE: case GT: case GEU: case GTU:
2218 case LE: case LT: case LEU: case LTU:
2219 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2220 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2221 loc = &XEXP (x, 0);
2222 goto restart;
2223
2224 case MEM:
2225 /* Most cases of MEM that convert to valid addresses have already been
2226 handled by our scan of regno_reg_rtx. The only special handling we
2227 need here is to make a copy of the rtx to ensure it isn't being
 2228 shared if we have to change it to a pseudo.
2229
2230 If the rtx is a simple reference to an address via a virtual register,
2231 it can potentially be shared. In such cases, first try to make it
2232 a valid address, which can also be shared. Otherwise, copy it and
2233 proceed normally.
2234
2235 First check for common cases that need no processing. These are
2236 usually due to instantiation already being done on a previous instance
2237 of a shared rtx. */
2238
2239 temp = XEXP (x, 0);
2240 if (CONSTANT_ADDRESS_P (temp)
2241#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2242 || temp == arg_pointer_rtx
2243#endif
2244 || temp == frame_pointer_rtx)
2245 return 1;
2246
2247 if (GET_CODE (temp) == PLUS
2248 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2249 && (XEXP (temp, 0) == frame_pointer_rtx
2250#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2251 || XEXP (temp, 0) == arg_pointer_rtx
2252#endif
2253 ))
2254 return 1;
2255
2256 if (temp == virtual_stack_vars_rtx
2257 || temp == virtual_incoming_args_rtx
2258 || (GET_CODE (temp) == PLUS
2259 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2260 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2261 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2262 {
2263 /* This MEM may be shared. If the substitution can be done without
2264 the need to generate new pseudos, we want to do it in place
2265 so all copies of the shared rtx benefit. The call below will
2266 only make substitutions if the resulting address is still
2267 valid.
2268
2269 Note that we cannot pass X as the object in the recursive call
2270 since the insn being processed may not allow all valid
 2271 addresses. However, if we were not passed an object, we can
 2272 only modify X without copying it if X will have a valid
 2273 address.
 2274
 2275 ??? Also note that this can still lose if OBJECT is an insn that
 2276 has fewer restrictions on an address than some other insn.
 2277 In that case, we will modify the shared address. This case
 2278 doesn't seem very likely, though. */
2279
2280 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2281 object ? object : x, 0))
2282 return 1;
2283
2284 /* Otherwise make a copy and process that copy. We copy the entire
2285 RTL expression since it might be a PLUS which could also be
2286 shared. */
2287 *loc = x = copy_rtx (x);
2288 }
2289
2290 /* Fall through to generic unary operation case. */
2291 case USE:
2292 case CLOBBER:
2293 case SUBREG:
2294 case STRICT_LOW_PART:
2295 case NEG: case NOT:
2296 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2297 case SIGN_EXTEND: case ZERO_EXTEND:
2298 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2299 case FLOAT: case FIX:
2300 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2301 case ABS:
2302 case SQRT:
2303 case FFS:
 2304 /* These cases either have just one operand or we know that we need not
2305 check the rest of the operands. */
2306 loc = &XEXP (x, 0);
2307 goto restart;
2308
2309 case REG:
2310 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2311 in front of this insn and substitute the temporary. */
2312 if (x == virtual_incoming_args_rtx)
2313 new = arg_pointer_rtx, offset = in_arg_offset;
2314 else if (x == virtual_stack_vars_rtx)
2315 new = frame_pointer_rtx, offset = var_offset;
2316 else if (x == virtual_stack_dynamic_rtx)
2317 new = stack_pointer_rtx, offset = dynamic_offset;
2318 else if (x == virtual_outgoing_args_rtx)
2319 new = stack_pointer_rtx, offset = out_arg_offset;
2320
2321 if (new)
2322 {
2323 temp = plus_constant (new, offset);
2324 if (!validate_change (object, loc, temp, 0))
2325 {
2326 if (! extra_insns)
2327 return 0;
2328
2329 start_sequence ();
 2330 temp = force_operand (temp, NULL_RTX);
2331 seq = get_insns ();
2332 end_sequence ();
2333
2334 emit_insns_before (seq, object);
2335 if (! validate_change (object, loc, temp, 0)
2336 && ! validate_replace_rtx (x, temp, object))
2337 abort ();
2338 }
2339 }
2340
2341 return 1;
2342 }
2343
2344 /* Scan all subexpressions. */
2345 fmt = GET_RTX_FORMAT (code);
2346 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2347 if (*fmt == 'e')
2348 {
2349 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2350 return 0;
2351 }
2352 else if (*fmt == 'E')
2353 for (j = 0; j < XVECLEN (x, i); j++)
2354 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2355 extra_insns))
2356 return 0;
2357
2358 return 1;
2359}
2360\f
2361/* Optimization: assuming this function does not receive nonlocal gotos,
2362 delete the handlers for such, as well as the insns to establish
2363 and disestablish them. */
2364
2365static void
2366delete_handlers ()
2367{
2368 rtx insn;
2369 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2370 {
2371 /* Delete the handler by turning off the flag that would
2372 prevent jump_optimize from deleting it.
2373 Also permit deletion of the nonlocal labels themselves
2374 if nothing local refers to them. */
2375 if (GET_CODE (insn) == CODE_LABEL)
2376 LABEL_PRESERVE_P (insn) = 0;
2377 if (GET_CODE (insn) == INSN
2378 && ((nonlocal_goto_handler_slot != 0
2379 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2380 || (nonlocal_goto_stack_level != 0
2381 && reg_mentioned_p (nonlocal_goto_stack_level,
2382 PATTERN (insn)))))
2383 delete_insn (insn);
2384 }
2385}
2386
2387/* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2388 of the current function. */
2389
2390rtx
2391nonlocal_label_rtx_list ()
2392{
2393 tree t;
2394 rtx x = 0;
2395
2396 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2397 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2398
2399 return x;
2400}
2401\f
2402/* Output a USE for any register use in RTL.
2403 This is used with -noreg to mark the extent of lifespan
2404 of any registers used in a user-visible variable's DECL_RTL. */
2405
2406void
2407use_variable (rtl)
2408 rtx rtl;
2409{
2410 if (GET_CODE (rtl) == REG)
2411 /* This is a register variable. */
2412 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2413 else if (GET_CODE (rtl) == MEM
2414 && GET_CODE (XEXP (rtl, 0)) == REG
2415 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2416 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2417 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2418 /* This is a variable-sized structure. */
2419 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2420}
2421
2422/* Like use_variable except that it outputs the USEs after INSN
2423 instead of at the end of the insn-chain. */
2424
2425void
2426use_variable_after (rtl, insn)
2427 rtx rtl, insn;
2428{
2429 if (GET_CODE (rtl) == REG)
2430 /* This is a register variable. */
2431 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2432 else if (GET_CODE (rtl) == MEM
2433 && GET_CODE (XEXP (rtl, 0)) == REG
2434 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2435 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2436 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2437 /* This is a variable-sized structure. */
2438 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2439}
2440\f
2441int
2442max_parm_reg_num ()
2443{
2444 return max_parm_reg;
2445}
2446
2447/* Return the first insn following those generated by `assign_parms'. */
2448
2449rtx
2450get_first_nonparm_insn ()
2451{
2452 if (last_parm_insn)
2453 return NEXT_INSN (last_parm_insn);
2454 return get_insns ();
2455}
2456
2457/* Return 1 if EXP returns an aggregate value, for which an address
2458 must be passed to the function or returned by the function. */
2459
2460int
2461aggregate_value_p (exp)
2462 tree exp;
2463{
2464 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2465 return 1;
2466 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2467 return 1;
2468 if (flag_pcc_struct_return
2469 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2470 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2471 return 1;
2472 return 0;
2473}
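/* For instance, a function returning a structure by value,

	struct pair { int a, b; };
	struct pair make_pair (void);

   has aggregate_value_p nonzero under -fpcc-struct-return (and always
   when the type's mode is BLKmode or RETURN_IN_MEMORY says so), so the
   caller must supply an address where the result is to be stored.  */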
2474\f
2475/* Assign RTL expressions to the function's parameters.
2476 This may involve copying them into registers and using
2477 those registers as the RTL for them.
2478
2479 If SECOND_TIME is non-zero it means that this function is being
2480 called a second time. This is done by integrate.c when a function's
2481 compilation is deferred. We need to come back here in case the
2482 FUNCTION_ARG macro computes items needed for the rest of the compilation
2483 (such as changing which registers are fixed or caller-saved). But suppress
2484 writing any insns or setting DECL_RTL of anything in this case. */
2485
2486void
2487assign_parms (fndecl, second_time)
2488 tree fndecl;
2489 int second_time;
2490{
2491 register tree parm;
2492 register rtx entry_parm = 0;
2493 register rtx stack_parm = 0;
2494 CUMULATIVE_ARGS args_so_far;
2495 enum machine_mode passed_mode, nominal_mode;
2496 /* Total space needed so far for args on the stack,
2497 given as a constant and a tree-expression. */
2498 struct args_size stack_args_size;
2499 tree fntype = TREE_TYPE (fndecl);
2500 tree fnargs = DECL_ARGUMENTS (fndecl);
2501 /* This is used for the arg pointer when referring to stack args. */
2502 rtx internal_arg_pointer;
2503 /* This is a dummy PARM_DECL that we used for the function result if
2504 the function returns a structure. */
2505 tree function_result_decl = 0;
2506 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2507 int varargs_setup = 0;
2508
2509 /* Nonzero if the last arg is named `__builtin_va_alist',
2510 which is used on some machines for old-fashioned non-ANSI varargs.h;
2511 this should be stuck onto the stack as if it had arrived there. */
2512 int vararg
2513 = (fnargs
2514 && (parm = tree_last (fnargs)) != 0
2515 && DECL_NAME (parm)
2516 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2517 "__builtin_va_alist")));
2518
2519 /* Nonzero if function takes extra anonymous args.
2520 This means the last named arg must be on the stack
2521 right before the anonymous ones. */
2522 int stdarg
2523 = (TYPE_ARG_TYPES (fntype) != 0
2524 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2525 != void_type_node));
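	  /* For example, an old varargs.h definition such as

		int sum (va_alist) va_dcl

	     expands in GNU C so that its last parameter is named
	     `__builtin_va_alist', matching the `vararg' test above,
	     while an ANSI prototype `int sum (int n, ...)' satisfies
	     the `stdarg' test instead.  */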
2526
2527 /* If the reg that the virtual arg pointer will be translated into is
2528 not a fixed reg or is the stack pointer, make a copy of the virtual
2529 arg pointer, and address parms via the copy. The frame pointer is
2530 considered fixed even though it is not marked as such.
2531
2532 The second time through, simply use ap to avoid generating rtx. */
2533
2534 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2535 || ! (fixed_regs[ARG_POINTER_REGNUM]
2536 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2537 && ! second_time)
2538 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2539 else
2540 internal_arg_pointer = virtual_incoming_args_rtx;
2541 current_function_internal_arg_pointer = internal_arg_pointer;
2542
2543 stack_args_size.constant = 0;
2544 stack_args_size.var = 0;
2545
2546 /* If struct value address is treated as the first argument, make it so. */
2547 if (aggregate_value_p (DECL_RESULT (fndecl))
2548 && ! current_function_returns_pcc_struct
2549 && struct_value_incoming_rtx == 0)
2550 {
2551 tree type = build_pointer_type (fntype);
2552
 2553 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2554
2555 DECL_ARG_TYPE (function_result_decl) = type;
2556 TREE_CHAIN (function_result_decl) = fnargs;
2557 fnargs = function_result_decl;
2558 }
2559
2560 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2561 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2562
2563#ifdef INIT_CUMULATIVE_INCOMING_ARGS
 2564 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
 2565#else
 2566 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2567#endif
2568
2569 /* We haven't yet found an argument that we must push and pretend the
2570 caller did. */
2571 current_function_pretend_args_size = 0;
2572
2573 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2574 {
2575 int aggregate
2576 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2577 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2578 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2579 struct args_size stack_offset;
2580 struct args_size arg_size;
2581 int passed_pointer = 0;
2582 tree passed_type = DECL_ARG_TYPE (parm);
2583
 2584 /* Set LAST_NAMED if this is the last named arg before some
2585 anonymous args. We treat it as if it were anonymous too. */
2586 int last_named = ((TREE_CHAIN (parm) == 0
2587 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2588 && (vararg || stdarg));
2589
2590 if (TREE_TYPE (parm) == error_mark_node
2591 /* This can happen after weird syntax errors
2592 or if an enum type is defined among the parms. */
2593 || TREE_CODE (parm) != PARM_DECL
2594 || passed_type == NULL)
2595 {
2596 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2597 TREE_USED (parm) = 1;
2598 continue;
2599 }
2600
2601 /* For varargs.h function, save info about regs and stack space
2602 used by the individual args, not including the va_alist arg. */
2603 if (vararg && last_named)
2604 current_function_args_info = args_so_far;
2605
2606 /* Find mode of arg as it is passed, and mode of arg
2607 as it should be during execution of this function. */
2608 passed_mode = TYPE_MODE (passed_type);
2609 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2610
2611 /* If the parm's mode is VOID, its value doesn't matter,
 2612 so avoid the usual things like emit_move_insn that could crash. */
2613 if (nominal_mode == VOIDmode)
2614 {
2615 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2616 continue;
2617 }
2618
2619#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2620 /* See if this arg was passed by invisible reference. */
2621 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2622 passed_type, ! last_named))
2623 {
2624 passed_type = build_pointer_type (passed_type);
2625 passed_pointer = 1;
2626 passed_mode = nominal_mode = Pmode;
2627 }
2628#endif
2629
2630 /* Let machine desc say which reg (if any) the parm arrives in.
2631 0 means it arrives on the stack. */
2632#ifdef FUNCTION_INCOMING_ARG
2633 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2634 passed_type, ! last_named);
2635#else
2636 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2637 passed_type, ! last_named);
2638#endif
2639
2640#ifdef SETUP_INCOMING_VARARGS
2641 /* If this is the last named parameter, do any required setup for
2642 varargs or stdargs. We need to know about the case of this being an
2643 addressable type, in which case we skip the registers it
2644 would have arrived in.
2645
2646 For stdargs, LAST_NAMED will be set for two parameters, the one that
2647 is actually the last named, and the dummy parameter. We only
2648 want to do this action once.
2649
2650 Also, indicate when RTL generation is to be suppressed. */
2651 if (last_named && !varargs_setup)
2652 {
2653 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2654 current_function_pretend_args_size,
2655 second_time);
2656 varargs_setup = 1;
2657 }
2658#endif
2659
2660 /* Determine parm's home in the stack,
2661 in case it arrives in the stack or we should pretend it did.
2662
2663 Compute the stack position and rtx where the argument arrives
2664 and its size.
2665
2666 There is one complexity here: If this was a parameter that would
2667 have been passed in registers, but wasn't only because it is
2668 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2669 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2670 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2671 0 as it was the previous time. */
2672
2673 locate_and_pad_parm (passed_mode, passed_type,
2674#ifdef STACK_PARMS_IN_REG_PARM_AREA
2675 1,
2676#else
2677#ifdef FUNCTION_INCOMING_ARG
2678 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2679 passed_type,
2680 (! last_named
2681 || varargs_setup)) != 0,
2682#else
2683 FUNCTION_ARG (args_so_far, passed_mode,
2684 passed_type,
2685 ! last_named || varargs_setup) != 0,
2686#endif
2687#endif
2688 fndecl, &stack_args_size, &stack_offset, &arg_size);
2689
2690 if (! second_time)
2691 {
2692 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2693
2694 if (offset_rtx == const0_rtx)
2695 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2696 else
2697 stack_parm = gen_rtx (MEM, passed_mode,
2698 gen_rtx (PLUS, Pmode,
2699 internal_arg_pointer, offset_rtx));
2700
2701 /* If this is a memory ref that contains aggregate components,
2702 mark it as such for cse and loop optimize. */
2703 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2704 }
2705
2706 /* If this parameter was passed both in registers and in the stack,
2707 use the copy on the stack. */
2708 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2709 entry_parm = 0;
2710
2711 /* If this parm was passed part in regs and part in memory,
2712 pretend it arrived entirely in memory
2713 by pushing the register-part onto the stack.
2714
2715 In the special case of a DImode or DFmode that is split,
2716 we could put it together in a pseudoreg directly,
2717 but for now that's not worth bothering with. */
2718
2719 if (entry_parm)
2720 {
2721 int nregs = 0;
2722#ifdef FUNCTION_ARG_PARTIAL_NREGS
2723 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2724 passed_type, ! last_named);
2725#endif
2726
2727 if (nregs > 0)
2728 {
2729 current_function_pretend_args_size
2730 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2731 / (PARM_BOUNDARY / BITS_PER_UNIT)
2732 * (PARM_BOUNDARY / BITS_PER_UNIT));
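	      /* E.g. on a hypothetical target with nregs == 3,
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, this is
		 (12 + 8 - 1) / 8 * 8 == 16 bytes of pretended argument
		 space: 12 bytes of register contents rounded up to the
		 8-byte parameter boundary.  */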
2733
2734 if (! second_time)
2735 move_block_from_reg (REGNO (entry_parm),
2736 validize_mem (stack_parm), nregs);
2737 entry_parm = stack_parm;
2738 }
2739 }
2740
2741 /* If we didn't decide this parm came in a register,
2742 by default it came on the stack. */
2743 if (entry_parm == 0)
2744 entry_parm = stack_parm;
2745
2746 /* Record permanently how this parm was passed. */
2747 if (! second_time)
2748 DECL_INCOMING_RTL (parm) = entry_parm;
2749
2750 /* If there is actually space on the stack for this parm,
2751 count it in stack_args_size; otherwise set stack_parm to 0
2752 to indicate there is no preallocated stack slot for the parm. */
2753
2754 if (entry_parm == stack_parm
 2755#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
 2756 /* On some machines, even if a parm value arrives in a register
 2757 there is still an (uninitialized) stack slot allocated for it.
 2758
 2759 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
 2760 whether this parameter already has a stack slot allocated,
 2761 because an arg block exists only if current_function_args_size
 2762 is larger than some threshold, and we haven't calculated that
 2763 yet. So, for now, we just assume that stack slots never exist
 2764 in this case. */
2765 || REG_PARM_STACK_SPACE (fndecl) > 0
2766#endif
2767 )
2768 {
2769 stack_args_size.constant += arg_size.constant;
2770 if (arg_size.var)
2771 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2772 }
2773 else
2774 /* No stack slot was pushed for this parm. */
2775 stack_parm = 0;
2776
2777 /* Update info on where next arg arrives in registers. */
2778
2779 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2780 passed_type, ! last_named);
2781
2782 /* If this is our second time through, we are done with this parm. */
2783 if (second_time)
2784 continue;
2785
2786 /* If we can't trust the parm stack slot to be aligned enough
2787 for its ultimate type, don't use that slot after entry.
2788 We'll make another stack slot, if we need one. */
2789 {
2790#ifdef FUNCTION_ARG_BOUNDARY
2791 int thisparm_boundary
2792 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2793#else
2794 int thisparm_boundary = PARM_BOUNDARY;
2795#endif
2796
2797 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2798 stack_parm = 0;
2799 }
2800
2801 /* Now adjust STACK_PARM to the mode and precise location
2802 where this parameter should live during execution,
2803 if we discover that it must live in the stack during execution.
2804 To make debuggers happier on big-endian machines, we store
2805 the value in the last bytes of the space available. */
2806
2807 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2808 && stack_parm != 0)
2809 {
2810 rtx offset_rtx;
2811
2812#if BYTES_BIG_ENDIAN
2813 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2814 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2815 - GET_MODE_SIZE (nominal_mode));
2816#endif
2817
2818 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2819 if (offset_rtx == const0_rtx)
2820 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2821 else
2822 stack_parm = gen_rtx (MEM, nominal_mode,
2823 gen_rtx (PLUS, Pmode,
2824 internal_arg_pointer, offset_rtx));
2825
2826 /* If this is a memory ref that contains aggregate components,
2827 mark it as such for cse and loop optimize. */
2828 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2829 }
2830
2831 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2832 in the mode in which it arrives.
2833 STACK_PARM is an RTX for a stack slot where the parameter can live
2834 during the function (in case we want to put it there).
2835 STACK_PARM is 0 if no stack slot was pushed for it.
2836
2837 Now output code if necessary to convert ENTRY_PARM to
2838 the type in which this function declares it,
2839 and store that result in an appropriate place,
2840 which may be a pseudo reg, may be STACK_PARM,
2841 or may be a local stack slot if STACK_PARM is 0.
2842
2843 Set DECL_RTL to that place. */
2844
2845 if (nominal_mode == BLKmode)
2846 {
2847 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2848 if (GET_CODE (entry_parm) == REG)
2849 {
2850 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2851 UNITS_PER_WORD);
2852
2853 /* Note that we will be storing an integral number of words.
2854 So we have to be careful to ensure that we allocate an
2855 integral number of words. We do this below in the
2856 assign_stack_local if space was not allocated in the argument
2857 list. If it was, this will not work if PARM_BOUNDARY is not
2858 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2859 if it becomes a problem. */
2860
2861 if (stack_parm == 0)
2862 stack_parm
2863 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2864 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2865 abort ();
2866
2867 move_block_from_reg (REGNO (entry_parm),
2868 validize_mem (stack_parm),
2869 size_stored / UNITS_PER_WORD);
2870 }
2871 DECL_RTL (parm) = stack_parm;
2872 }
 2873 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
 2874 && ! DECL_INLINE (fndecl))
2875 /* layout_decl may set this. */
2876 || TREE_ADDRESSABLE (parm)
2877 || TREE_SIDE_EFFECTS (parm)
2878 /* If -ffloat-store specified, don't put explicit
2879 float variables into registers. */
2880 || (flag_float_store
2881 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2882 /* Always assign pseudo to structure return or item passed
2883 by invisible reference. */
2884 || passed_pointer || parm == function_result_decl)
2885 {
2886 /* Store the parm in a pseudoregister during the function. */
2887 register rtx parmreg = gen_reg_rtx (nominal_mode);
2888
2889 REG_USERVAR_P (parmreg) = 1;
2890
2891 /* If this was an item that we received a pointer to, set DECL_RTL
2892 appropriately. */
2893 if (passed_pointer)
2894 {
2895 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2896 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2897 }
2898 else
2899 DECL_RTL (parm) = parmreg;
2900
2901 /* Copy the value into the register. */
2902 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
2903 {
2904 /* If ENTRY_PARM is a hard register, it might be in a register
2905 not valid for operating in its mode (e.g., an odd-numbered
2906 register for a DFmode). In that case, moves are the only
2907 thing valid, so we can't do a convert from there. This
 2908 occurs when the calling sequence allows such misaligned
2909 usages. */
2910 if (GET_CODE (entry_parm) == REG
2911 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2912 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2913 GET_MODE (entry_parm)))
 2914 convert_move (parmreg, copy_to_reg (entry_parm), 0);
2915 else
2916 convert_move (parmreg, validize_mem (entry_parm), 0);
2917 }
2918 else
2919 emit_move_insn (parmreg, validize_mem (entry_parm));
2920
2921 /* If we were passed a pointer but the actual value
2922 can safely live in a register, put it in one. */
 2923 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2924 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
2925 && ! DECL_INLINE (fndecl))
2926 /* layout_decl may set this. */
2927 || TREE_ADDRESSABLE (parm)
2928 || TREE_SIDE_EFFECTS (parm)
2929 /* If -ffloat-store specified, don't put explicit
2930 float variables into registers. */
2931 || (flag_float_store
2932 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
2933 {
2934 /* We can't use nominal_mode, because it will have been set to
2935 Pmode above. We must use the actual mode of the parm. */
2936 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2937 emit_move_insn (parmreg, DECL_RTL (parm));
2938 DECL_RTL (parm) = parmreg;
2939 }
2940
2941 /* In any case, record the parm's desired stack location
2942 in case we later discover it must live in the stack. */
2943 if (REGNO (parmreg) >= nparmregs)
2944 {
2945 rtx *new;
2946 nparmregs = REGNO (parmreg) + 5;
2947 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2948 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2949 parm_reg_stack_loc = new;
2950 }
2951 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2952
2953 /* Mark the register as eliminable if we did no conversion
2954 and it was copied from memory at a fixed offset,
2955 and the arg pointer was not copied to a pseudo-reg.
2956 If the arg pointer is a pseudo reg or the offset formed
2957 an invalid address, such memory-equivalences
2958 as we make here would screw up life analysis for it. */
2959 if (nominal_mode == passed_mode
2960 && GET_CODE (entry_parm) == MEM
 2961 && entry_parm == stack_parm
2962 && stack_offset.var == 0
2963 && reg_mentioned_p (virtual_incoming_args_rtx,
2964 XEXP (entry_parm, 0)))
2965 REG_NOTES (get_last_insn ())
2966 = gen_rtx (EXPR_LIST, REG_EQUIV,
2967 entry_parm, REG_NOTES (get_last_insn ()));
2968
2969 /* For pointer data type, suggest pointer register. */
2970 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2971 mark_reg_pointer (parmreg);
2972 }
2973 else
2974 {
2975 /* Value must be stored in the stack slot STACK_PARM
2976 during function execution. */
2977
2978 if (passed_mode != nominal_mode)
2979 {
2980 /* Conversion is required. */
2981 if (GET_CODE (entry_parm) == REG
2982 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2983 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2984 entry_parm = copy_to_reg (entry_parm);
2985
2986 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2987 }
2988
2989 if (entry_parm != stack_parm)
2990 {
2991 if (stack_parm == 0)
2992 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2993 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2994 emit_move_insn (validize_mem (stack_parm),
2995 validize_mem (entry_parm));
2996 }
2997
2998 DECL_RTL (parm) = stack_parm;
2999 }
3000
3001 /* If this "parameter" was the place where we are receiving the
3002 function's incoming structure pointer, set up the result. */
3003 if (parm == function_result_decl)
3004 DECL_RTL (DECL_RESULT (fndecl))
3005 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3006
3007 if (TREE_THIS_VOLATILE (parm))
3008 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3009 if (TREE_READONLY (parm))
3010 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3011 }
3012
3013 max_parm_reg = max_reg_num ();
3014 last_parm_insn = get_last_insn ();
3015
3016 current_function_args_size = stack_args_size.constant;
3017
3018 /* Adjust function incoming argument size for alignment and
3019 minimum length. */
3020
3021#ifdef REG_PARM_STACK_SPACE
 3022#ifndef MAYBE_REG_PARM_STACK_SPACE
 3023 current_function_args_size = MAX (current_function_args_size,
 3024 REG_PARM_STACK_SPACE (fndecl));
 3025#endif
 3026#endif
3027
3028#ifdef STACK_BOUNDARY
3029#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3030
3031 current_function_args_size
3032 = ((current_function_args_size + STACK_BYTES - 1)
3033 / STACK_BYTES) * STACK_BYTES;
3034#endif
3035
3036#ifdef ARGS_GROW_DOWNWARD
3037 current_function_arg_offset_rtx
 3038 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
 3039 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
 3040 size_int (-stack_args_size.constant)),
 3041 NULL_RTX, VOIDmode, 0));
3042#else
3043 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3044#endif
3045
3046 /* See how many bytes, if any, of its args a function should try to pop
3047 on return. */
3048
3049 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3050 current_function_args_size);
3051
3052 /* For stdarg.h function, save info about regs and stack space
3053 used by the named args. */
3054
3055 if (stdarg)
3056 current_function_args_info = args_so_far;
3057
3058 /* Set the rtx used for the function return value. Put this in its
3059 own variable so any optimizers that need this information don't have
3060 to include tree.h. Do this here so it gets done when an inlined
3061 function gets output. */
3062
3063 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3064}
3065\f
3066/* Compute the size and offset from the start of the stacked arguments for a
3067 parm passed in mode PASSED_MODE and with type TYPE.
3068
3069 INITIAL_OFFSET_PTR points to the current offset into the stacked
3070 arguments.
3071
3072 The starting offset and size for this parm are returned in *OFFSET_PTR
3073 and *ARG_SIZE_PTR, respectively.
3074
3075 IN_REGS is non-zero if the argument will be passed in registers. It will
3076 never be set if REG_PARM_STACK_SPACE is not defined.
3077
3078 FNDECL is the function in which the argument was defined.
3079
3080 There are two types of rounding that are done. The first, controlled by
3081 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3082 list to be aligned to the specific boundary (in bits). This rounding
3083 affects the initial and starting offsets, but not the argument size.
3084
3085 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3086 optionally rounds the size of the parm to PARM_BOUNDARY. The
3087 initial offset is not affected by this rounding, while the size always
3088 is and the starting offset may be. */
3089
 3090/* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
 3091 initial_offset_ptr is positive because locate_and_pad_parm's
 3092 callers pass in the total size of args so far as
 3093 initial_offset_ptr. arg_size_ptr is always positive. */
3094
3095static void pad_to_arg_alignment (), pad_below ();
3096
3097void
3098locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3099 initial_offset_ptr, offset_ptr, arg_size_ptr)
3100 enum machine_mode passed_mode;
3101 tree type;
3102 int in_regs;
3103 tree fndecl;
3104 struct args_size *initial_offset_ptr;
3105 struct args_size *offset_ptr;
3106 struct args_size *arg_size_ptr;
3107{
3108 tree sizetree
3109 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3110 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3111 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3112 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3113 int reg_parm_stack_space = 0;
3114
3115#ifdef REG_PARM_STACK_SPACE
3116 /* If we have found a stack parm before we reach the end of the
3117 area reserved for registers, skip that area. */
3118 if (! in_regs)
3119 {
 3120#ifdef MAYBE_REG_PARM_STACK_SPACE
 3121 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
 3122#else
 3123 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
 3124#endif
3125 if (reg_parm_stack_space > 0)
3126 {
3127 if (initial_offset_ptr->var)
3128 {
3129 initial_offset_ptr->var
3130 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3131 size_int (reg_parm_stack_space));
3132 initial_offset_ptr->constant = 0;
3133 }
3134 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3135 initial_offset_ptr->constant = reg_parm_stack_space;
3136 }
3137 }
3138#endif /* REG_PARM_STACK_SPACE */
3139
3140 arg_size_ptr->var = 0;
3141 arg_size_ptr->constant = 0;
3142
3143#ifdef ARGS_GROW_DOWNWARD
3144 if (initial_offset_ptr->var)
3145 {
3146 offset_ptr->constant = 0;
3147 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3148 initial_offset_ptr->var);
3149 }
3150 else
3151 {
3152 offset_ptr->constant = - initial_offset_ptr->constant;
3153 offset_ptr->var = 0;
3154 }
3155 if (where_pad == upward
3156 && (TREE_CODE (sizetree) != INTEGER_CST
3157 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3158 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3159 SUB_PARM_SIZE (*offset_ptr, sizetree);
3160 pad_to_arg_alignment (offset_ptr, boundary);
3161 if (initial_offset_ptr->var)
3162 {
3163 arg_size_ptr->var = size_binop (MINUS_EXPR,
3164 size_binop (MINUS_EXPR,
3165 integer_zero_node,
3166 initial_offset_ptr->var),
3167 offset_ptr->var);
3168 }
3169 else
3170 {
3171 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3172 offset_ptr->constant);
3173 }
3174/* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3175 if (where_pad == downward)
3176 pad_below (arg_size_ptr, passed_mode, sizetree);
3177#else /* !ARGS_GROW_DOWNWARD */
3178 pad_to_arg_alignment (initial_offset_ptr, boundary);
3179 *offset_ptr = *initial_offset_ptr;
3180 if (where_pad == downward)
3181 pad_below (offset_ptr, passed_mode, sizetree);
3182
3183#ifdef PUSH_ROUNDING
3184 if (passed_mode != BLKmode)
3185 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3186#endif
3187
3188 if (where_pad != none
3189 && (TREE_CODE (sizetree) != INTEGER_CST
3190 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3191 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3192
3193 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3194#endif /* ARGS_GROW_DOWNWARD */
3195}
3196
3197/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3198 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3199
3200static void
3201pad_to_arg_alignment (offset_ptr, boundary)
3202 struct args_size *offset_ptr;
3203 int boundary;
3204{
3205 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3206
3207 if (boundary > BITS_PER_UNIT)
3208 {
3209 if (offset_ptr->var)
3210 {
3211 offset_ptr->var =
3212#ifdef ARGS_GROW_DOWNWARD
3213 round_down
3214#else
3215 round_up
3216#endif
3217 (ARGS_SIZE_TREE (*offset_ptr),
3218 boundary / BITS_PER_UNIT);
3219 offset_ptr->constant = 0; /*?*/
3220 }
3221 else
3222 offset_ptr->constant =
3223#ifdef ARGS_GROW_DOWNWARD
3224 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3225#else
3226 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3227#endif
3228 }
3229}
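/* Worked example (the boundary is hypothetical): with BOUNDARY == 64
   bits, i.e. 8 bytes, a constant offset of 12 becomes
   CEIL_ROUND (12, 8) == 16 when arguments grow upward; under
   ARGS_GROW_DOWNWARD, where the offset is kept negative,
   FLOOR_ROUND (-12, 8) == -16.  */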
3230
3231static void
3232pad_below (offset_ptr, passed_mode, sizetree)
3233 struct args_size *offset_ptr;
3234 enum machine_mode passed_mode;
3235 tree sizetree;
3236{
3237 if (passed_mode != BLKmode)
3238 {
3239 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3240 offset_ptr->constant
3241 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3242 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3243 - GET_MODE_SIZE (passed_mode));
3244 }
3245 else
3246 {
3247 if (TREE_CODE (sizetree) != INTEGER_CST
3248 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3249 {
3250 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3251 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3252 /* Add it in. */
3253 ADD_PARM_SIZE (*offset_ptr, s2);
3254 SUB_PARM_SIZE (*offset_ptr, sizetree);
3255 }
3256 }
3257}
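/* E.g. for a parm in HImode (16 bits) with PARM_BOUNDARY == 32: the
   slot is padded to 4 bytes, the 2 bytes of padding sit below the
   value, and *OFFSET_PTR is advanced by 4 - 2 == 2.  */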
3258
3259static tree
3260round_down (value, divisor)
3261 tree value;
3262 int divisor;
3263{
3264 return size_binop (MULT_EXPR,
3265 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3266 size_int (divisor));
3267}
3268\f
3269/* Walk the tree of blocks describing the binding levels within a function
3270 and warn about uninitialized variables.
3271 This is done after calling flow_analysis and before global_alloc
3272 clobbers the pseudo-regs to hard regs. */
3273
3274void
3275uninitialized_vars_warning (block)
3276 tree block;
3277{
3278 register tree decl, sub;
3279 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3280 {
3281 if (TREE_CODE (decl) == VAR_DECL
 3282 /* These warnings are unreliable for aggregates
3283 because assigning the fields one by one can fail to convince
3284 flow.c that the entire aggregate was initialized.
3285 Unions are troublesome because members may be shorter. */
3286 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3287 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3288 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3289 && DECL_RTL (decl) != 0
3290 && GET_CODE (DECL_RTL (decl)) == REG
3291 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3292 warning_with_decl (decl,
3293 "`%s' may be used uninitialized in this function");
3294 if (TREE_CODE (decl) == VAR_DECL
3295 && DECL_RTL (decl) != 0
3296 && GET_CODE (DECL_RTL (decl)) == REG
3297 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3298 warning_with_decl (decl,
3299 "variable `%s' may be clobbered by `longjmp'");
3300 }
3301 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3302 uninitialized_vars_warning (sub);
3303}
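/* A sketch of the kind of code this catches: in

	int f (int flag) { int x; if (flag) x = 1; return x; }

   `x' lives in a pseudo-reg that flow analysis marks as possibly used
   before being set, so `f' gets the "may be used uninitialized"
   warning above.  */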
3304
3305/* Do the appropriate part of uninitialized_vars_warning
3306 but for arguments instead of local variables. */
3307
3308void
3309setjmp_args_warning (block)
3310 tree block;
3311{
3312 register tree decl;
3313 for (decl = DECL_ARGUMENTS (current_function_decl);
3314 decl; decl = TREE_CHAIN (decl))
3315 if (DECL_RTL (decl) != 0
3316 && GET_CODE (DECL_RTL (decl)) == REG
3317 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3318 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3319}
3320
 3321/* If this function calls setjmp, put all vars into the stack
3322 unless they were declared `register'. */
3323
3324void
3325setjmp_protect (block)
3326 tree block;
3327{
3328 register tree decl, sub;
3329 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3330 if ((TREE_CODE (decl) == VAR_DECL
3331 || TREE_CODE (decl) == PARM_DECL)
3332 && DECL_RTL (decl) != 0
3333 && GET_CODE (DECL_RTL (decl)) == REG
 3334 /* If this variable came from an inline function, it must be
 3335 that its life doesn't overlap the setjmp. If there was a
 3336 setjmp in the function, it would already be in memory. We
 3337 must exclude such variables because their DECL_RTL might be
 3338 set to strange things such as virtual_stack_vars_rtx. */
3339 && ! DECL_FROM_INLINE (decl)
3340 && (
3341#ifdef NON_SAVING_SETJMP
3342 /* If longjmp doesn't restore the registers,
3343 don't put anything in them. */
3344 NON_SAVING_SETJMP
3345 ||
3346#endif
3347 ! DECL_REGISTER (decl)))
3348 put_var_into_stack (decl);
3349 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3350 setjmp_protect (sub);
3351}
3352\f
3353/* Like the previous function, but for args instead of local variables. */
3354
3355void
3356setjmp_protect_args ()
3357{
3358 register tree decl, sub;
3359 for (decl = DECL_ARGUMENTS (current_function_decl);
3360 decl; decl = TREE_CHAIN (decl))
3361 if ((TREE_CODE (decl) == VAR_DECL
3362 || TREE_CODE (decl) == PARM_DECL)
3363 && DECL_RTL (decl) != 0
3364 && GET_CODE (DECL_RTL (decl)) == REG
3365 && (
3366 /* If longjmp doesn't restore the registers,
3367 don't put anything in them. */
3368#ifdef NON_SAVING_SETJMP
3369 NON_SAVING_SETJMP
3370 ||
3371#endif
3372 ! DECL_REGISTER (decl)))
3373 put_var_into_stack (decl);
3374}
3375\f
3376/* Return the context-pointer register corresponding to DECL,
3377 or 0 if it does not need one. */
3378
3379rtx
3380lookup_static_chain (decl)
3381 tree decl;
3382{
3383 tree context = decl_function_context (decl);
3384 tree link;
3385
3386 if (context == 0)
3387 return 0;
3388
3389 /* We treat inline_function_decl as an alias for the current function
3390 because that is the inline function whose vars, types, etc.
3391 are being merged into the current function.
3392 See expand_inline_function. */
3393 if (context == current_function_decl || context == inline_function_decl)
3394 return virtual_stack_vars_rtx;
3395
3396 for (link = context_display; link; link = TREE_CHAIN (link))
3397 if (TREE_PURPOSE (link) == context)
3398 return RTL_EXPR_RTL (TREE_VALUE (link));
3399
3400 abort ();
3401}
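/* A minimal model of the display search above (illustrative only;
   `toy_display', its fields, and `toy_lookup' are hypothetical): the
   display is an association list from a containing function to the
   rtx recorded for that function's frame base.  */

struct toy_display
{
  struct toy_display *next;
  int context;			/* stands for the TREE_PURPOSE (a FUNCTION_DECL) */
  int frame_base;		/* stands for the RTL_EXPR_RTL value */
};

static int
toy_lookup (display, context)
     struct toy_display *display;
     int context;
{
  register struct toy_display *p;

  for (p = display; p; p = p->next)
    if (p->context == context)
      return p->frame_base;
  return -1;			/* lookup_static_chain aborts instead */
}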
3402\f
3403/* Convert a stack slot address ADDR for variable VAR
3404 (from a containing function)
3405 into an address valid in this function (using a static chain). */
3406
3407rtx
3408fix_lexical_addr (addr, var)
3409 rtx addr;
3410 tree var;
3411{
3412 rtx basereg;
3413 int displacement;
3414 tree context = decl_function_context (var);
3415 struct function *fp;
3416 rtx base = 0;
3417
3418 /* If this is the present function, we need not do anything. */
3419 if (context == current_function_decl || context == inline_function_decl)
3420 return addr;
3421
3422 for (fp = outer_function_chain; fp; fp = fp->next)
3423 if (fp->decl == context)
3424 break;
3425
3426 if (fp == 0)
3427 abort ();
3428
3429 /* Decode given address as base reg plus displacement. */
3430 if (GET_CODE (addr) == REG)
3431 basereg = addr, displacement = 0;
3432 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3433 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3434 else
3435 abort ();
3436
3437 /* We accept vars reached via the containing function's
3438 incoming arg pointer and via its stack variables pointer. */
3439 if (basereg == fp->internal_arg_pointer)
3440 {
3441 /* If reached via arg pointer, get the arg pointer value
3442 out of that function's stack frame.
3443
3444 There are two cases: If a separate ap is needed, allocate a
3445 slot in the outer function for it and dereference it that way.
3446 This is correct even if the real ap is actually a pseudo.
3447 Otherwise, just adjust the offset from the frame pointer to
3448 compensate. */
3449
3450#ifdef NEED_SEPARATE_AP
3451 rtx addr;
3452
3453 if (fp->arg_pointer_save_area == 0)
3454 fp->arg_pointer_save_area
3455 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3456
3457 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3458 addr = memory_address (Pmode, addr);
3459
3460 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3461#else
3462 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3463 base = lookup_static_chain (var);
3464#endif
3465 }
3466
3467 else if (basereg == virtual_stack_vars_rtx)
3468 {
3469 /* This is the same code as lookup_static_chain, duplicated here to
3470 avoid an extra call to decl_function_context. */
3471 tree link;
3472
3473 for (link = context_display; link; link = TREE_CHAIN (link))
3474 if (TREE_PURPOSE (link) == context)
3475 {
3476 base = RTL_EXPR_RTL (TREE_VALUE (link));
3477 break;
3478 }
3479 }
3480
3481 if (base == 0)
3482 abort ();
3483
3484 /* Use same offset, relative to appropriate static chain or argument
3485 pointer. */
3486 return plus_constant (base, displacement);
3487}
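/* A simplified model of the address decoding above (illustrative
   only; the struct and names are hypothetical stand-ins for RTL): an
   incoming address is either a bare register or (plus (reg) (const_int)),
   and fix_lexical_addr keeps the displacement while substituting a
   base register that is valid in the current function.  */

struct toy_addr
{
  int is_sum;			/* 0: (reg N);  1: (plus (reg N) (const_int off)) */
  int regno;
  int offset;
};

static void
decode_toy_addr (a, basereg, displacement)
     struct toy_addr *a;
     int *basereg, *displacement;
{
  *basereg = a->regno;
  *displacement = a->is_sum ? a->offset : 0;
}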
3488\f
3489/* Return the address of the trampoline for entering nested fn FUNCTION.
3490 If necessary, allocate a trampoline (in the stack frame)
3491 and emit rtl to initialize its contents (at entry to this function). */
3492
3493rtx
3494trampoline_address (function)
3495 tree function;
3496{
3497 tree link;
3498 tree rtlexp;
3499 rtx tramp;
3500 struct function *fp;
3501 tree fn_context;
3502
3503 /* Find an existing trampoline and return it. */
3504 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3505 if (TREE_PURPOSE (link) == function)
3506 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3507 for (fp = outer_function_chain; fp; fp = fp->next)
3508 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3509 if (TREE_PURPOSE (link) == function)
3510 {
3511 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3512 function);
3513 return round_trampoline_addr (tramp);
3514 }
3515
3516 /* None exists; we must make one. */
3517
3518 /* Find the `struct function' for the function containing FUNCTION. */
3519 fp = 0;
3520 fn_context = decl_function_context (function);
3521 if (fn_context != current_function_decl)
3522 for (fp = outer_function_chain; fp; fp = fp->next)
3523 if (fp->decl == fn_context)
3524 break;
3525
3526 /* Allocate run-time space for this trampoline
3527 (usually in the defining function's stack frame). */
3528#ifdef ALLOCATE_TRAMPOLINE
3529 tramp = ALLOCATE_TRAMPOLINE (fp);
3530#else
3531 /* If rounding needed, allocate extra space
3532 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3533#ifdef TRAMPOLINE_ALIGNMENT
3534#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3535#else
3536#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3537#endif
3538 if (fp != 0)
3539 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3540 else
3541 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3542#endif
3543
3544 /* Record the trampoline for reuse and note it for later initialization
3545 by expand_function_end. */
3546 if (fp != 0)
3547 {
3548 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3549 rtlexp = make_node (RTL_EXPR);
3550 RTL_EXPR_RTL (rtlexp) = tramp;
3551 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3552 pop_obstacks ();
3553 }
3554 else
3555 {
3556 /* Make the RTL_EXPR node temporary, not momentary, so that the
3557 trampoline_list doesn't become garbage. */
3558 int momentary = suspend_momentary ();
3559 rtlexp = make_node (RTL_EXPR);
3560 resume_momentary (momentary);
3561
3562 RTL_EXPR_RTL (rtlexp) = tramp;
3563 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3564 }
3565
3566 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3567 return round_trampoline_addr (tramp);
3568}
3569
3570/* Given a trampoline address,
3571 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
3572
3573static rtx
3574round_trampoline_addr (tramp)
3575 rtx tramp;
3576{
3577#ifdef TRAMPOLINE_ALIGNMENT
3578 /* Round address up to desired boundary. */
3579 rtx temp = gen_reg_rtx (Pmode);
3580 temp = expand_binop (Pmode, add_optab, tramp,
3581 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3582 temp, 0, OPTAB_LIB_WIDEN);
3583 tramp = expand_binop (Pmode, and_optab, temp,
3584 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3585 temp, 0, OPTAB_LIB_WIDEN);
3586#endif
3587 return tramp;
3588}
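/* The two expand_binop calls above compute the usual power-of-two
   round-up, (tramp + ALIGN - 1) & -ALIGN.  A standalone sketch of the
   identity (illustrative only; assumes align is a power of two):  */

static unsigned long
round_up_pow2 (x, align)
     unsigned long x, align;
{
  /* -align is the mask with the low log2(align) bits clear:
     e.g. x = 13, align = 8  =>  (13 + 7) & ~7 = 16.  */
  return (x + align - 1) & -align;
}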
3589\f
3590/* The functions identify_blocks and reorder_blocks provide a way to
3591 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3592 duplicate portions of the RTL code. Call identify_blocks before
3593 changing the RTL, and call reorder_blocks after. */
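/* For reference, the intended calling sequence (illustrative only;
   the caller sketched here is hypothetical; the vector is xmalloc'd
   by identify_blocks):

       tree *vec = identify_blocks (top_block, get_insns ());
       ... reorder or duplicate parts of the insn chain ...
       top_block = reorder_blocks (vec, top_block, get_insns ());
       free (vec);
*/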
3594
3595static int all_blocks ();
3596static tree blocks_nreverse ();
3597
3598/* Put all this function's BLOCK nodes into a vector, and return it.
3599 Also store in each NOTE for the beginning or end of a block
3600 the index of that block in the vector.
3601 The arguments are TOP_BLOCK, the top-level block of the function,
3602 and INSNS, the insn chain of the function. */
3603
3604tree *
3605identify_blocks (top_block, insns)
3606 tree top_block;
3607 rtx insns;
3608{
3609 int n_blocks;
3610 tree *block_vector;
3611 int *block_stack;
3612 int depth = 0;
3613 int next_block_number = 0;
3614 int current_block_number = 0;
3615 rtx insn;
3616
3617 if (top_block == 0)
3618 return 0;
3619
3620 n_blocks = all_blocks (top_block, 0);
3621 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3622 block_stack = (int *) alloca (n_blocks * sizeof (int));
3623
3624 all_blocks (top_block, block_vector);
3625
3626 for (insn = insns; insn; insn = NEXT_INSN (insn))
3627 if (GET_CODE (insn) == NOTE)
3628 {
3629 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3630 {
3631 block_stack[depth++] = current_block_number;
3632 current_block_number = next_block_number;
3633 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
3634 }
3635 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3636 {
3637 current_block_number = block_stack[--depth];
3638 NOTE_BLOCK_NUMBER (insn) = current_block_number;
3639 }
3640 }
3641
3642 return block_vector;
3643}
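/* A minimal model of the numbering discipline above (illustrative
   only; all names are hypothetical).  Over a well-formed string of
   '(' and ')' standing for BLOCK_BEG/BLOCK_END notes, each BEG gets
   the next fresh block index and each END the index of the enclosing
   block, exactly as NOTE_BLOCK_NUMBER is set in the loop above.
   Assumes nesting depth under 64.  */

static void
number_block_notes (markers, numbers)
     char *markers;		/* e.g. "(()())" */
     int *numbers;		/* one entry per marker */
{
  int stack[64];
  int depth = 0, next = 0, current = 0;
  int i;

  for (i = 0; markers[i]; i++)
    if (markers[i] == '(')
      {
	stack[depth++] = current;
	current = next;
	numbers[i] = next++;
      }
    else
      {
	current = stack[--depth];
	numbers[i] = current;
      }
}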
3644
3645/* Given BLOCK_VECTOR which was returned by identify_blocks,
3646 and a revised instruction chain, rebuild the tree structure
3647 of BLOCK nodes to correspond to the new order of RTL.
3648 The new block tree is inserted below TOP_BLOCK.
3649 Returns the current top-level block. */
3650
3651tree
3652 reorder_blocks (block_vector, top_block, insns)
3653 tree *block_vector;
3654 tree top_block;
3655 rtx insns;
3656{
3657 tree current_block = top_block;
3658 rtx insn;
3659
3660 if (block_vector == 0)
3661 return top_block;
3662
3663 /* Prune the old tree away, so that it doesn't get in the way. */
3664 BLOCK_SUBBLOCKS (current_block) = 0;
3665
3666 for (insn = insns; insn; insn = NEXT_INSN (insn))
3667 if (GET_CODE (insn) == NOTE)
3668 {
3669 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3670 {
3671 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
3672 /* If we have seen this block before, copy it. */
3673 if (TREE_ASM_WRITTEN (block))
3674 block = copy_node (block);
3675 BLOCK_SUBBLOCKS (block) = 0;
3676 TREE_ASM_WRITTEN (block) = 1;
3677 BLOCK_SUPERCONTEXT (block) = current_block;
3678 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3679 BLOCK_SUBBLOCKS (current_block) = block;
3680 current_block = block;
3681 NOTE_SOURCE_FILE (insn) = 0;
3682 }
3683 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3684 {
3685 BLOCK_SUBBLOCKS (current_block)
3686 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3687 current_block = BLOCK_SUPERCONTEXT (current_block);
3688 NOTE_SOURCE_FILE (insn) = 0;
3689 }
3690 }
3691
3692 return current_block;
3693}
3694
3695/* Reverse the order of elements in the chain T of blocks,
3696 and return the new head of the chain (old last element). */
3697
3698static tree
3699blocks_nreverse (t)
3700 tree t;
3701{
3702 register tree prev = 0, decl, next;
3703 for (decl = t; decl; decl = next)
3704 {
3705 next = BLOCK_CHAIN (decl);
3706 BLOCK_CHAIN (decl) = prev;
3707 prev = decl;
3708 }
3709 return prev;
3710}
3711
3712/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
3713 Also clear TREE_ASM_WRITTEN in all blocks. */
3714
3715static int
3716all_blocks (block, vector)
3717 tree block;
3718 tree *vector;
3719{
3720 int n_blocks = 1;
3721 tree subblocks;
3722
3723 TREE_ASM_WRITTEN (block) = 0;
3724 /* Record this block. */
3725 if (vector)
3726 vector[0] = block;
3727
3728 /* Record the subblocks, and their subblocks. */
3729 for (subblocks = BLOCK_SUBBLOCKS (block);
3730 subblocks; subblocks = BLOCK_CHAIN (subblocks))
3731 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
3732
3733 return n_blocks;
3734}
3735\f
3736/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3737 and initialize static variables for generating RTL for the statements
3738 of the function. */
3739
3740void
3741init_function_start (subr, filename, line)
3742 tree subr;
3743 char *filename;
3744 int line;
3745{
3746 char *junk;
3747
3748 init_stmt_for_function ();
3749
3750 cse_not_expected = ! optimize;
3751
3752 /* Caller save not needed yet. */
3753 caller_save_needed = 0;
3754
3755 /* No stack slots have been made yet. */
3756 stack_slot_list = 0;
3757
3758 /* There is no stack slot for handling nonlocal gotos. */
3759 nonlocal_goto_handler_slot = 0;
3760 nonlocal_goto_stack_level = 0;
3761
3762 /* No labels have been declared for nonlocal use. */
3763 nonlocal_labels = 0;
3764
3765 /* No function calls so far in this function. */
3766 function_call_count = 0;
3767
3768 /* No parm regs have been allocated.
3769 (This is important for output_inline_function.) */
3770 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3771
3772 /* Initialize the RTL mechanism. */
3773 init_emit ();
3774
3775 /* Initialize the queue of pending postincrement and postdecrements,
3776 and some other info in expr.c. */
3777 init_expr ();
3778
3779 /* We haven't done register allocation yet. */
3780 reg_renumber = 0;
3781
3782 init_const_rtx_hash_table ();
3783
3784 current_function_name = (*decl_printable_name) (subr, &junk);
3785
3786 /* Nonzero if this is a nested function that uses a static chain. */
3787
3788 current_function_needs_context
3789 = (decl_function_context (current_function_decl) != 0);
3790
3791 /* Set if a call to setjmp is seen. */
3792 current_function_calls_setjmp = 0;
3793
3794 /* Set if a call to longjmp is seen. */
3795 current_function_calls_longjmp = 0;
3796
3797 current_function_calls_alloca = 0;
3798 current_function_has_nonlocal_label = 0;
3799 current_function_contains_functions = 0;
3800
3801 current_function_returns_pcc_struct = 0;
3802 current_function_returns_struct = 0;
3803 current_function_epilogue_delay_list = 0;
3804 current_function_uses_const_pool = 0;
3805 current_function_uses_pic_offset_table = 0;
3806
3807 /* We have not yet needed to make a label to jump to for tail-recursion. */
3808 tail_recursion_label = 0;
3809
3810 /* We haven't had a need to make a save area for ap yet. */
3811
3812 arg_pointer_save_area = 0;
3813
3814 /* No stack slots allocated yet. */
3815 frame_offset = 0;
3816
3817 /* No SAVE_EXPRs in this function yet. */
3818 save_expr_regs = 0;
3819
3820 /* No RTL_EXPRs in this function yet. */
3821 rtl_expr_chain = 0;
3822
3823 /* We have not allocated any temporaries yet. */
3824 temp_slots = 0;
3825 temp_slot_level = 0;
3826
3827 /* Within the function body, compute a type's size as soon as it is laid out. */
3828 immediate_size_expand++;
3829
3830 init_pending_stack_adjust ();
3831 inhibit_defer_pop = 0;
3832
3833 current_function_outgoing_args_size = 0;
3834
3835 /* Initialize the insn lengths. */
3836 init_insn_lengths ();
3837
3838 /* Prevent ever trying to delete the first instruction of a function.
3839 Also tell final how to output a linenum before the function prologue. */
3840 emit_line_note (filename, line);
3841
3842 /* Make sure first insn is a note even if we don't want linenums.
3843 This makes sure the first insn will never be deleted.
3844 Also, final expects a note to appear there. */
3845 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3846
3847 /* Set flags used by final.c. */
3848 if (aggregate_value_p (DECL_RESULT (subr)))
3849 {
3850#ifdef PCC_STATIC_STRUCT_RETURN
3851 if (flag_pcc_struct_return)
3852 current_function_returns_pcc_struct = 1;
3853 else
3854#endif
3855 current_function_returns_struct = 1;
3856 }
3857
3858 /* Warn if this value is an aggregate type,
3859 regardless of which calling convention we are using for it. */
3860 if (warn_aggregate_return
3861 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
3862 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
3863 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
3864 warning ("function returns an aggregate");
3865
3866 current_function_returns_pointer
3867 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
3868
3869 /* Indicate that we need to distinguish between the return value of the
3870 present function and the return value of a function being called. */
3871 rtx_equal_function_value_matters = 1;
3872
3873 /* Indicate that we have not instantiated virtual registers yet. */
3874 virtuals_instantiated = 0;
3875
3876 /* Indicate we have no need of a frame pointer yet. */
3877 frame_pointer_needed = 0;
3878
3879 /* By default assume not varargs. */
3880 current_function_varargs = 0;
3881}
3882
3883/* Indicate that the current function uses extra args
3884 not explicitly mentioned in the argument list in any fashion. */
3885
3886void
3887mark_varargs ()
3888{
3889 current_function_varargs = 1;
3890}
3891
3892/* Expand a call to __main at the beginning of a possible main function. */
3893
3894void
3895expand_main_function ()
3896{
3897#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
3898 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
3899 VOIDmode, 0);
3900#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
3901}
3902\f
3903/* Start the RTL for a new function, and set variables used for
3904 emitting RTL.
3905 SUBR is the FUNCTION_DECL node.
3906 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3907 the function's parameters, which must be run at any return statement. */
3908
3909void
3910expand_function_start (subr, parms_have_cleanups)
3911 tree subr;
3912 int parms_have_cleanups;
3913{
3914 register int i;
3915 tree tem;
3916 rtx last_ptr;
3917
3918 /* Make sure volatile mem refs aren't considered
3919 valid operands of arithmetic insns. */
3920 init_recog_no_volatile ();
3921
3922 /* If function gets a static chain arg, store it in the stack frame.
3923 Do this first, so it gets the first stack slot offset. */
3924 if (current_function_needs_context)
3925 {
3926 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3927 emit_move_insn (last_ptr, static_chain_incoming_rtx);
3928 }
3929
3930 /* If the parameters of this function need cleaning up, get a label
3931 for the beginning of the code which executes those cleanups. This must
3932 be done before doing anything with return_label. */
3933 if (parms_have_cleanups)
3934 cleanup_label = gen_label_rtx ();
3935 else
3936 cleanup_label = 0;
3937
3938 /* Make the label for return statements to jump to, if this machine
3939 does not have a one-instruction return and uses an epilogue,
3940 or if it returns a structure, or if it has parm cleanups. */
3941#ifdef HAVE_return
3942 if (cleanup_label == 0 && HAVE_return
3943 && ! current_function_returns_pcc_struct
3944 && ! (current_function_returns_struct && ! optimize))
3945 return_label = 0;
3946 else
3947 return_label = gen_label_rtx ();
3948#else
3949 return_label = gen_label_rtx ();
3950#endif
3951
3952 /* Initialize rtx used to return the value. */
3953 /* Do this before assign_parms so that we copy the struct value address
3954 before any library calls that assign parms might generate. */
3955
3956 /* Decide whether to return the value in memory or in a register. */
3957 if (aggregate_value_p (DECL_RESULT (subr)))
3958 {
3959 /* Returning something that won't go in a register. */
3960 register rtx value_address = 0;
3961
3962#ifdef PCC_STATIC_STRUCT_RETURN
3963 if (current_function_returns_pcc_struct)
3964 {
3965 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3966 value_address = assemble_static_space (size);
3967 }
3968 else
3969#endif
3970 {
3971 /* Expect to be passed the address of a place to store the value.
3972 If it is passed as an argument, assign_parms will take care of
3973 it. */
3974 if (struct_value_incoming_rtx)
3975 {
3976 value_address = gen_reg_rtx (Pmode);
3977 emit_move_insn (value_address, struct_value_incoming_rtx);
3978 }
3979 }
3980 if (value_address)
3981 DECL_RTL (DECL_RESULT (subr))
3982 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
3983 value_address);
3984 }
3985 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3986 /* If return mode is void, this decl rtl should not be used. */
3987 DECL_RTL (DECL_RESULT (subr)) = 0;
3988 else if (parms_have_cleanups)
3989 /* If function will end with cleanup code for parms,
3990 compute the return values into a pseudo reg,
3991 which we will copy into the true return register
3992 after the cleanups are done. */
3993 DECL_RTL (DECL_RESULT (subr))
3994 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
3995 else
3996 /* Scalar, returned in a register. */
3997 {
3998#ifdef FUNCTION_OUTGOING_VALUE
3999 DECL_RTL (DECL_RESULT (subr))
4000 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4001#else
4002 DECL_RTL (DECL_RESULT (subr))
4003 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4004#endif
4005
4006 /* Mark this reg as the function's return value. */
4007 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4008 {
4009 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4010 /* Needed because we may need to move this to memory
4011 in case it's a named return value whose address is taken. */
4012 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4013 }
4014 }
4015
4016 /* Initialize rtx for parameters and local variables.
4017 In some cases this requires emitting insns. */
4018
4019 assign_parms (subr, 0);
4020
4021 /* The following was moved from init_function_start.
4022 The move is supposed to make sdb output more accurate. */
4023 /* Indicate the beginning of the function body,
4024 as opposed to parm setup. */
4025 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4026
4027 /* If doing stupid allocation, mark parms as born here. */
4028
4029 if (GET_CODE (get_last_insn ()) != NOTE)
4030 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4031 parm_birth_insn = get_last_insn ();
4032
4033 if (obey_regdecls)
4034 {
4035 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4036 use_variable (regno_reg_rtx[i]);
4037
4038 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4039 use_variable (current_function_internal_arg_pointer);
4040 }
4041
4042 /* Fetch static chain values for containing functions. */
4043 tem = decl_function_context (current_function_decl);
4044 /* If not doing stupid register allocation, then start off with the static
4045 chain pointer in a pseudo register. Otherwise, we use the stack
4046 address that was generated above. */
4047 if (tem && ! obey_regdecls)
4048 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4049 context_display = 0;
4050 while (tem)
4051 {
4052 tree rtlexp = make_node (RTL_EXPR);
4053
4054 RTL_EXPR_RTL (rtlexp) = last_ptr;
4055 context_display = tree_cons (tem, rtlexp, context_display);
4056 tem = decl_function_context (tem);
4057 if (tem == 0)
4058 break;
4059 /* Chain thru stack frames, assuming pointer to next lexical frame
4060 is found at the place we always store it. */
4061#ifdef FRAME_GROWS_DOWNWARD
4062 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4063#endif
4064 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4065 memory_address (Pmode, last_ptr)));
4066 }
4067
4068 /* After the display initializations is where the tail-recursion label
4069 should go, if we end up needing one. Ensure we have a NOTE here
4070 since some things (like trampolines) get placed before this. */
4071 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4072
4073 /* Evaluate now the sizes of any types declared among the arguments. */
4074 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4075 expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
4076
4077 /* Make sure there is a line number after the function entry setup code. */
4078 force_next_line_note ();
4079}
4080\f
4081/* Generate RTL for the end of the current function.
4082 FILENAME and LINE are the current position in the source file. */
4083
4084/* It is up to language-specific callers to do cleanups for parameters. */
4085
4086void
4087expand_function_end (filename, line)
4088 char *filename;
4089 int line;
4090{
4091 register int i;
4092 tree link;
4093
4094 static rtx initial_trampoline;
4095
4096#ifdef NON_SAVING_SETJMP
4097 /* Don't put any variables in registers if we call setjmp
4098 on a machine that fails to restore the registers. */
4099 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4100 {
4101 setjmp_protect (DECL_INITIAL (current_function_decl));
4102 setjmp_protect_args ();
4103 }
4104#endif
4105
4106 /* Save the argument pointer if a save area was made for it. */
4107 if (arg_pointer_save_area)
4108 {
4109 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4110 emit_insn_before (x, tail_recursion_reentry);
4111 }
4112
4113 /* Initialize any trampolines required by this function. */
4114 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4115 {
4116 tree function = TREE_PURPOSE (link);
4117 rtx context = lookup_static_chain (function);
4118 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4119 rtx seq;
4120
4121 /* First make sure this compilation has a template for
4122 initializing trampolines. */
4123 if (initial_trampoline == 0)
4124 {
4125 end_temporary_allocation ();
4126 initial_trampoline
4127 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4128 resume_temporary_allocation ();
4129 }
4130
4131 /* Generate insns to initialize the trampoline. */
4132 start_sequence ();
4133 tramp = change_address (initial_trampoline, BLKmode,
4134 round_trampoline_addr (XEXP (tramp, 0)));
4135 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4136 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4137 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4138 XEXP (DECL_RTL (function), 0), context);
4139 seq = get_insns ();
4140 end_sequence ();
4141
4142 /* Put those insns at entry to the containing function (this one). */
4143 emit_insns_before (seq, tail_recursion_reentry);
4144 }
4145 /* Clear the trampoline_list for the next function. */
4146 trampoline_list = 0;
4147
4148#if 0 /* I think unused parms are legitimate enough. */
4149 /* Warn about unused parms. */
4150 if (warn_unused)
4151 {
4152 rtx decl;
4153
4154 for (decl = DECL_ARGUMENTS (current_function_decl);
4155 decl; decl = TREE_CHAIN (decl))
4156 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4157 warning_with_decl (decl, "unused parameter `%s'");
4158 }
4159#endif
4160
4161 /* Delete handlers for nonlocal gotos if nothing uses them. */
4162 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4163 delete_handlers ();
4164
4165 /* End any sequences that failed to be closed due to syntax errors. */
4166 while (in_sequence_p ())
4167 end_sequence ();
4168
4169 /* Outside function body, can't compute type's actual size
4170 until next function's body starts. */
4171 immediate_size_expand--;
4172
4173 /* If doing stupid register allocation,
4174 mark register parms as dying here. */
4175
4176 if (obey_regdecls)
4177 {
4178 rtx tem;
4179 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4180 use_variable (regno_reg_rtx[i]);
4181
4182 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4183
4184 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4185 {
4186 use_variable (XEXP (tem, 0));
4187 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4188 }
4189
4190 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4191 use_variable (current_function_internal_arg_pointer);
4192 }
4193
4194 clear_pending_stack_adjust ();
4195 do_pending_stack_adjust ();
4196
4197 /* Mark the end of the function body.
4198 If control reaches this insn, the function can drop through
4199 without returning a value. */
4200 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4201
4202 /* Output a line number for the end of the function.
4203 SDB depends on this. */
4204 emit_line_note_force (filename, line);
4205
4206 /* Output the label for the actual return from the function,
4207 if one is expected. This happens either because a function epilogue
4208 is used instead of a return instruction, or because a return was done
4209 with a goto in order to run local cleanups, or because of pcc-style
4210 structure returning. */
4211
4212 if (return_label)
4213 emit_label (return_label);
4214
4215 /* If we had calls to alloca, and this machine needs
4216 an accurate stack pointer to exit the function,
4217 insert some code to save and restore the stack pointer. */
4218#ifdef EXIT_IGNORE_STACK
4219 if (! EXIT_IGNORE_STACK)
4220#endif
4221 if (current_function_calls_alloca)
4222 {
4223 rtx tem = 0;
4224
4225 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4226 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4227 }
4228
4229 /* If scalar return value was computed in a pseudo-reg,
4230 copy that to the hard return register. */
4231 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4232 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4233 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4234 >= FIRST_PSEUDO_REGISTER))
4235 {
4236 rtx real_decl_result;
4237
4238#ifdef FUNCTION_OUTGOING_VALUE
4239 real_decl_result
4240 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4241 current_function_decl);
4242#else
4243 real_decl_result
4244 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4245 current_function_decl);
4246#endif
4247 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4248 emit_move_insn (real_decl_result,
4249 DECL_RTL (DECL_RESULT (current_function_decl)));
4250 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4251 }
4252
4253 /* If returning a structure, arrange to return the address of the value
4254 in a place where debuggers expect to find it.
4255
4256 If returning a structure PCC style,
4257 the caller also depends on this value.
4258 And current_function_returns_pcc_struct is not necessarily set. */
4259 if (current_function_returns_struct
4260 || current_function_returns_pcc_struct)
4261 {
4262 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4263 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4264#ifdef FUNCTION_OUTGOING_VALUE
4265 rtx outgoing
4266 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4267 current_function_decl);
4268#else
4269 rtx outgoing
4270 = FUNCTION_VALUE (build_pointer_type (type),
4271 current_function_decl);
4272#endif
4273
4274 /* Mark this as a function return value so integrate will delete the
4275 assignment and USE below when inlining this function. */
4276 REG_FUNCTION_VALUE_P (outgoing) = 1;
4277
4278 emit_move_insn (outgoing, value_address);
4279 use_variable (outgoing);
4280 }
4281
4282 /* Output a return insn if we are using one.
4283 Otherwise, let the rtl chain end here, to drop through
4284 into the epilogue. */
4285
4286#ifdef HAVE_return
4287 if (HAVE_return)
4288 {
4289 emit_jump_insn (gen_return ());
4290 emit_barrier ();
4291 }
4292#endif
4293
4294 /* Fix up any gotos that jumped out to the outermost
4295 binding level of the function.
4296 Must follow emitting RETURN_LABEL. */
4297
4298 /* If you have any cleanups to do at this point,
4299 and they need to create temporary variables,
4300 then you will lose. */
4301 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4302}
4303\f
4304/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4305
4306static int *prologue;
4307static int *epilogue;
4308
4309/* Create an array that records the INSN_UIDs of INSNS (either a sequence
4310 or a single insn). */
4311
4312static int *
4313record_insns (insns)
4314 rtx insns;
4315{
4316 int *vec;
4317
4318 if (GET_CODE (insns) == SEQUENCE)
4319 {
4320 int len = XVECLEN (insns, 0);
4321 vec = (int *) oballoc ((len + 1) * sizeof (int));
4322 vec[len] = 0;
4323 while (--len >= 0)
4324 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4325 }
4326 else
4327 {
4328 vec = (int *) oballoc (2 * sizeof (int));
4329 vec[0] = INSN_UID (insns);
4330 vec[1] = 0;
4331 }
4332 return vec;
4333}
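/* For reference, the zero-terminated convention used above
   (illustrative only; `count_uids' is a hypothetical helper): the
   vector ends at the first 0, which relies on no real insn having
   UID 0, and is scanned the same way `contains' below scans it.  */

static int
count_uids (vec)
     int *vec;
{
  int j;

  for (j = 0; vec[j]; j++)
    ;
  return j;
}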
4334
4335/* Determine how many INSN_UIDs in VEC are part of INSN. */
4336
4337static int
4338contains (insn, vec)
4339 rtx insn;
4340 int *vec;
4341{
4342 register int i, j;
4343
4344 if (GET_CODE (insn) == INSN
4345 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4346 {
4347 int count = 0;
4348 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4349 for (j = 0; vec[j]; j++)
4350 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4351 count++;
4352 return count;
4353 }
4354 else
4355 {
4356 for (j = 0; vec[j]; j++)
4357 if (INSN_UID (insn) == vec[j])
4358 return 1;
4359 }
4360 return 0;
4361}
4362
4363/* Generate the prologue and epilogue RTL if the machine supports it. Thread
4364 this into place with notes indicating where the prologue ends and where
4365 the epilogue begins. Update the basic block information when possible. */
4366
4367void
4368thread_prologue_and_epilogue_insns (f)
4369 rtx f;
4370{
4371#ifdef HAVE_prologue
4372 if (HAVE_prologue)
4373 {
4374 rtx head, seq, insn;
4375
4376 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4377 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4378 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4379 seq = gen_prologue ();
4380 head = emit_insn_after (seq, f);
4381
4382 /* Include the new prologue insns in the first block. Ignore them
4383 if they form a basic block unto themselves. */
4384 if (basic_block_head && n_basic_blocks
4385 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4386 basic_block_head[0] = NEXT_INSN (f);
4387
4388 /* Retain a map of the prologue insns. */
4389 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4390 }
4391 else
4392#endif
4393 prologue = 0;
4394
4395#ifdef HAVE_epilogue
4396 if (HAVE_epilogue)
4397 {
4398 rtx insn = get_last_insn ();
4399 rtx prev = prev_nonnote_insn (insn);
4400
4401 /* If we end with a BARRIER, we don't need an epilogue. */
4402 if (! (prev && GET_CODE (prev) == BARRIER))
4403 {
4404 rtx tail, seq;
4405
4406 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4407 the epilogue insns (this must include the jump insn that
4408 returns), USE insns at the end of a function, and a BARRIER. */
4409
4410 emit_barrier_after (insn);
4411
4412 /* Place the epilogue before the USE insns at the end of a
4413 function. */
4414 while (prev
4415 && GET_CODE (prev) == INSN
4416 && GET_CODE (PATTERN (prev)) == USE)
4417 {
4418 insn = PREV_INSN (prev);
4419 prev = prev_nonnote_insn (prev);
4420 }
4421
4422 seq = gen_epilogue ();
4423 tail = emit_jump_insn_after (seq, insn);
4424 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4425
4426 /* Include the new epilogue insns in the last block. Ignore
4427 them if they form a basic block unto themselves. */
4428 if (basic_block_end && n_basic_blocks
4429 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4430 basic_block_end[n_basic_blocks - 1] = tail;
4431
4432 /* Retain a map of the epilogue insns. */
4433 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4434 return;
4435 }
4436 }
4437#endif
4438 epilogue = 0;
4439}
4440
4441/* Reposition the prologue-end and epilogue-begin notes after instruction
4442 scheduling and delayed branch scheduling. */
4443
4444void
4445reposition_prologue_and_epilogue_notes (f)
4446 rtx f;
4447{
4448#if defined (HAVE_prologue) || defined (HAVE_epilogue)
4449 /* Reposition the prologue and epilogue notes. */
4450 if (n_basic_blocks)
4451 {
4452 rtx next, prev;
4453 int len;
4454
4455 if (prologue)
4456 {
4457 register rtx insn, note = 0;
4458
4459 /* Scan from the beginning until we reach the last prologue insn.
4460 We apparently can't depend on basic_block_{head,end} after
4461 reorg has run. */
4462 for (len = 0; prologue[len]; len++)
4463 ;
4464 for (insn = f; insn; insn = NEXT_INSN (insn))
4465 if (GET_CODE (insn) == NOTE)
4466 {
4467 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4468 note = insn;
4469 }
4470 else if ((len -= contains (insn, prologue)) == 0)
4471 {
4472 /* Find the prologue-end note if we haven't already, and
4473 move it to just after the last prologue insn. */
4474 if (note == 0)
4475 for (note = insn; note = NEXT_INSN (note);)
4476 if (GET_CODE (note) == NOTE
4477 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4478 break;
4479 next = NEXT_INSN (note);
4480 prev = PREV_INSN (note);
4481 if (prev)
4482 NEXT_INSN (prev) = next;
4483 if (next)
4484 PREV_INSN (next) = prev;
4485 add_insn_after (note, insn);
4486 break;
4487 }
4488 }
4489
4490 if (epilogue)
4491 {
4492 register rtx insn, note = 0;
4493
4494 /* Scan from the end until we reach the first epilogue insn.
4495 We apparently can't depend on basic_block_{head,end} after
4496 reorg has run. */
4497 for (len = 0; epilogue[len]; len++)
4498 ;
4499 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
4500 if (GET_CODE (insn) == NOTE)
4501 {
4502 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4503 note = insn;
4504 }
4505 else if ((len -= contains (insn, epilogue)) == 0)
4506 {
4507 /* Find the epilogue-begin note if we haven't already, and
4508 move it to just before the first epilogue insn. */
4509 if (note == 0)
4510 for (note = insn; note = PREV_INSN (note);)
4511 if (GET_CODE (note) == NOTE
4512 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4513 break;
4514 next = NEXT_INSN (note);
4515 prev = PREV_INSN (note);
4516 if (prev)
4517 NEXT_INSN (prev) = next;
4518 if (next)
4519 PREV_INSN (next) = prev;
4520 add_insn_after (note, PREV_INSN (insn));
4521 break;
4522 }
4523 }
4524 }
4525#endif /* HAVE_prologue or HAVE_epilogue */
4526}