]> gcc.gnu.org Git - gcc.git/blob - gcc/function.c
(pop_function_context, push_function_context):
[gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40 #include "config.h"
41
42 #include <stdio.h>
43
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57
58 /* Round a value to the lowest integer less than it that is a multiple of
59 the required alignment. Avoid using division in case the value is
60 negative. Assume the alignment is a power of two. */
61 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
62
63 /* Similar, but round to the next highest integer that meets the
64 alignment. */
65 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
66
67 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
68 during rtl generation. If they are different register numbers, this is
69 always true. It may also be true if
70 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
71 generation. See fix_lexical_addr for details. */
72
73 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
74 #define NEED_SEPARATE_AP
75 #endif
76
77 /* Number of bytes of args popped by function being compiled on its return.
78 Zero if no bytes are to be popped.
79 May affect compilation of return insn or of function epilogue. */
80
81 int current_function_pops_args;
82
83 /* Nonzero if function being compiled needs to be given an address
84 where the value should be stored. */
85
86 int current_function_returns_struct;
87
88 /* Nonzero if function being compiled needs to
89 return the address of where it has put a structure value. */
90
91 int current_function_returns_pcc_struct;
92
93 /* Nonzero if function being compiled needs to be passed a static chain. */
94
95 int current_function_needs_context;
96
97 /* Nonzero if function being compiled can call setjmp. */
98
99 int current_function_calls_setjmp;
100
101 /* Nonzero if function being compiled can call longjmp. */
102
103 int current_function_calls_longjmp;
104
105 /* Nonzero if function being compiled receives nonlocal gotos
106 from nested functions. */
107
108 int current_function_has_nonlocal_label;
109
110 /* Nonzero if function being compiled contains nested functions. */
111
112 int current_function_contains_functions;
113
114 /* Nonzero if function being compiled can call alloca,
115 either as a subroutine or builtin. */
116
117 int current_function_calls_alloca;
118
119 /* Nonzero if the current function returns a pointer type */
120
121 int current_function_returns_pointer;
122
123 /* If some insns can be deferred to the delay slots of the epilogue, the
124 delay list for them is recorded here. */
125
126 rtx current_function_epilogue_delay_list;
127
128 /* If function's args have a fixed size, this is that size, in bytes.
129 Otherwise, it is -1.
130 May affect compilation of return insn or of function epilogue. */
131
132 int current_function_args_size;
133
134 /* # bytes the prologue should push and pretend that the caller pushed them.
135 The prologue must do this, but only if parms can be passed in registers. */
136
137 int current_function_pretend_args_size;
138
139 /* # of bytes of outgoing arguments required to be pushed by the prologue.
140 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
141 and no stack adjusts will be done on function calls. */
142
143 int current_function_outgoing_args_size;
144
145 /* This is the offset from the arg pointer to the place where the first
146 anonymous arg can be found, if there is one. */
147
148 rtx current_function_arg_offset_rtx;
149
150 /* Nonzero if current function uses varargs.h or equivalent.
151 Zero for functions that use stdarg.h. */
152
153 int current_function_varargs;
154
155 /* Quantities of various kinds of registers
156 used for the current function's args. */
157
158 CUMULATIVE_ARGS current_function_args_info;
159
160 /* Name of function now being compiled. */
161
162 char *current_function_name;
163
164 /* If non-zero, an RTL expression for that location at which the current
165 function returns its result. Always equal to
166 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
167 independently of the tree structures. */
168
169 rtx current_function_return_rtx;
170
171 /* Nonzero if the current function uses the constant pool. */
172
173 int current_function_uses_const_pool;
174
175 /* Nonzero if the current function uses pic_offset_table_rtx. */
176 int current_function_uses_pic_offset_table;
177
178 /* The arg pointer hard register, or the pseudo into which it was copied. */
179 rtx current_function_internal_arg_pointer;
180
181 /* The FUNCTION_DECL for an inline function currently being expanded. */
182 tree inline_function_decl;
183
184 /* Number of function calls seen so far in current function. */
185
186 int function_call_count;
187
188 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
189 (labels to which there can be nonlocal gotos from nested functions)
190 in this function. */
191
192 tree nonlocal_labels;
193
194 /* RTX for stack slot that holds the current handler for nonlocal gotos.
195 Zero when function does not have nonlocal labels. */
196
197 rtx nonlocal_goto_handler_slot;
198
199 /* RTX for stack slot that holds the stack pointer value to restore
200 for a nonlocal goto.
201 Zero when function does not have nonlocal labels. */
202
203 rtx nonlocal_goto_stack_level;
204
205 /* Label that will go on parm cleanup code, if any.
206 Jumping to this label runs cleanup code for parameters, if
207 such code must be run. Following this code is the logical return label. */
208
209 rtx cleanup_label;
210
211 /* Label that will go on function epilogue.
212 Jumping to this label serves as a "return" instruction
213 on machines which require execution of the epilogue on all returns. */
214
215 rtx return_label;
216
217 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
218 So we can mark them all live at the end of the function, if nonopt. */
219 rtx save_expr_regs;
220
221 /* List (chain of EXPR_LISTs) of all stack slots in this function.
222 Made for the sake of unshare_all_rtl. */
223 rtx stack_slot_list;
224
225 /* Chain of all RTL_EXPRs that have insns in them. */
226 tree rtl_expr_chain;
227
228 /* Label to jump back to for tail recursion, or 0 if we have
229 not yet needed one for this function. */
230 rtx tail_recursion_label;
231
232 /* Place after which to insert the tail_recursion_label if we need one. */
233 rtx tail_recursion_reentry;
234
235 /* Location at which to save the argument pointer if it will need to be
236 referenced. There are two cases where this is done: if nonlocal gotos
237 exist, or if vars stored at an offset from the argument pointer will be
238 needed by inner routines. */
239
240 rtx arg_pointer_save_area;
241
242 /* Offset to end of allocated area of stack frame.
243 If stack grows down, this is the address of the last stack slot allocated.
244 If stack grows up, this is the address for the next slot. */
245 int frame_offset;
246
247 /* List (chain of TREE_LISTs) of static chains for containing functions.
248 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
249 in an RTL_EXPR in the TREE_VALUE. */
250 static tree context_display;
251
252 /* List (chain of TREE_LISTs) of trampolines for nested functions.
253 The trampoline sets up the static chain and jumps to the function.
254 We supply the trampoline's address when the function's address is requested.
255
256 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
257 in an RTL_EXPR in the TREE_VALUE. */
258 static tree trampoline_list;
259
260 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
261 static rtx parm_birth_insn;
262
263 #if 0
264 /* Nonzero if a stack slot has been generated whose address is not
265 actually valid. It means that the generated rtl must all be scanned
266 to detect and correct the invalid addresses where they occur. */
267 static int invalid_stack_slot;
268 #endif
269
270 /* Last insn of those whose job was to put parms into their nominal homes. */
271 static rtx last_parm_insn;
272
273 /* 1 + last pseudo register number used for loading a copy
274 of a parameter of this function. */
275 static int max_parm_reg;
276
277 /* Vector indexed by REGNO, containing location on stack in which
278 to put the parm which is nominally in pseudo register REGNO,
279 if we discover that that parm must go in the stack. */
280 static rtx *parm_reg_stack_loc;
281
282 #if 0 /* Turned off because 0 seems to work just as well. */
283 /* Cleanup lists are required for binding levels regardless of whether
284 that binding level has cleanups or not. This node serves as the
285 cleanup list whenever an empty list is required. */
286 static tree empty_cleanup_list;
287 #endif
288
289 /* Nonzero once virtual register instantiation has been done.
290 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
291 static int virtuals_instantiated;
292
293 /* Nonzero if we need to distinguish between the return value of this function
294 and the return value of a function called by this function. This helps
295 integrate.c */
296
297 extern int rtx_equal_function_value_matters;
298
299 void fixup_gotos ();
300
301 static tree round_down ();
302 static rtx round_trampoline_addr ();
303 static rtx fixup_stack_1 ();
304 static void fixup_var_refs ();
305 static void fixup_var_refs_insns ();
306 static void fixup_var_refs_1 ();
307 static void optimize_bit_field ();
308 static void instantiate_decls ();
309 static void instantiate_decls_1 ();
310 static void instantiate_decl ();
311 static int instantiate_virtual_regs_1 ();
312 static rtx fixup_memory_subreg ();
313 static rtx walk_fixup_memory_subreg ();
314 \f
315 /* In order to evaluate some expressions, such as function calls returning
316 structures in memory, we need to temporarily allocate stack locations.
317 We record each allocated temporary in the following structure.
318
319 Associated with each temporary slot is a nesting level. When we pop up
320 one level, all temporaries associated with the previous level are freed.
321 Normally, all temporaries are freed after the execution of the statement
322 in which they were created. However, if we are inside a ({...}) grouping,
323 the result may be in a temporary and hence must be preserved. If the
324 result could be in a temporary, we preserve it if we can determine which
325 one it is in. If we cannot determine which temporary may contain the
326 result, all temporaries are preserved. A temporary is preserved by
327 pretending it was allocated at the previous nesting level.
328
329 Automatic variables are also assigned temporary slots, at the nesting
330 level where they are defined. They are marked a "kept" so that
331 free_temp_slots will not free them. */
332
333 struct temp_slot
334 {
335 /* Points to next temporary slot. */
336 struct temp_slot *next;
337 /* The rtx to used to reference the slot. */
338 rtx slot;
339 /* The size, in units, of the slot. */
340 int size;
341 /* Non-zero if this temporary is currently in use. */
342 char in_use;
343 /* Nesting level at which this slot is being used. */
344 int level;
345 /* Non-zero if this should survive a call to free_temp_slots. */
346 int keep;
347 };
348
349 /* List of all temporaries allocated, both available and in use. */
350
351 struct temp_slot *temp_slots;
352
353 /* Current nesting level for temporaries. */
354
355 int temp_slot_level;
356 \f
357 /* Pointer to chain of `struct function' for containing functions. */
358 struct function *outer_function_chain;
359
360 /* Given a function decl for a containing function,
361 return the `struct function' for it. */
362
363 struct function *
364 find_function_data (decl)
365 tree decl;
366 {
367 struct function *p;
368 for (p = outer_function_chain; p; p = p->next)
369 if (p->decl == decl)
370 return p;
371 abort ();
372 }
373
374 /* Save the current context for compilation of a nested function.
375 This is called from language-specific code.
376 The caller is responsible for saving any language-specific status,
377 since this function knows only about language-independent variables. */
378
379 void
380 push_function_context ()
381 {
382 struct function *p = (struct function *) xmalloc (sizeof (struct function));
383
384 p->next = outer_function_chain;
385 outer_function_chain = p;
386
387 p->name = current_function_name;
388 p->decl = current_function_decl;
389 p->pops_args = current_function_pops_args;
390 p->returns_struct = current_function_returns_struct;
391 p->returns_pcc_struct = current_function_returns_pcc_struct;
392 p->needs_context = current_function_needs_context;
393 p->calls_setjmp = current_function_calls_setjmp;
394 p->calls_longjmp = current_function_calls_longjmp;
395 p->calls_alloca = current_function_calls_alloca;
396 p->has_nonlocal_label = current_function_has_nonlocal_label;
397 p->args_size = current_function_args_size;
398 p->pretend_args_size = current_function_pretend_args_size;
399 p->arg_offset_rtx = current_function_arg_offset_rtx;
400 p->uses_const_pool = current_function_uses_const_pool;
401 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
402 p->internal_arg_pointer = current_function_internal_arg_pointer;
403 p->max_parm_reg = max_parm_reg;
404 p->parm_reg_stack_loc = parm_reg_stack_loc;
405 p->outgoing_args_size = current_function_outgoing_args_size;
406 p->return_rtx = current_function_return_rtx;
407 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
408 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
409 p->nonlocal_labels = nonlocal_labels;
410 p->cleanup_label = cleanup_label;
411 p->return_label = return_label;
412 p->save_expr_regs = save_expr_regs;
413 p->stack_slot_list = stack_slot_list;
414 p->parm_birth_insn = parm_birth_insn;
415 p->frame_offset = frame_offset;
416 p->tail_recursion_label = tail_recursion_label;
417 p->tail_recursion_reentry = tail_recursion_reentry;
418 p->arg_pointer_save_area = arg_pointer_save_area;
419 p->rtl_expr_chain = rtl_expr_chain;
420 p->last_parm_insn = last_parm_insn;
421 p->context_display = context_display;
422 p->trampoline_list = trampoline_list;
423 p->function_call_count = function_call_count;
424 p->temp_slots = temp_slots;
425 p->temp_slot_level = temp_slot_level;
426 p->fixup_var_refs_queue = 0;
427 p->epilogue_delay_list = current_function_epilogue_delay_list;
428
429 save_tree_status (p);
430 save_storage_status (p);
431 save_emit_status (p);
432 init_emit ();
433 save_expr_status (p);
434 save_stmt_status (p);
435 save_varasm_status (p);
436 }
437
438 /* Restore the last saved context, at the end of a nested function.
439 This function is called from language-specific code. */
440
441 void
442 pop_function_context ()
443 {
444 struct function *p = outer_function_chain;
445
446 outer_function_chain = p->next;
447
448 current_function_name = p->name;
449 current_function_decl = p->decl;
450 current_function_pops_args = p->pops_args;
451 current_function_returns_struct = p->returns_struct;
452 current_function_returns_pcc_struct = p->returns_pcc_struct;
453 current_function_needs_context = p->needs_context;
454 current_function_calls_setjmp = p->calls_setjmp;
455 current_function_calls_longjmp = p->calls_longjmp;
456 current_function_calls_alloca = p->calls_alloca;
457 current_function_has_nonlocal_label = p->has_nonlocal_label;
458 current_function_contains_functions = 1;
459 current_function_args_size = p->args_size;
460 current_function_pretend_args_size = p->pretend_args_size;
461 current_function_arg_offset_rtx = p->arg_offset_rtx;
462 current_function_uses_const_pool = p->uses_const_pool;
463 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
464 current_function_internal_arg_pointer = p->internal_arg_pointer;
465 max_parm_reg = p->max_parm_reg;
466 parm_reg_stack_loc = p->parm_reg_stack_loc;
467 current_function_outgoing_args_size = p->outgoing_args_size;
468 current_function_return_rtx = p->return_rtx;
469 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
470 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
471 nonlocal_labels = p->nonlocal_labels;
472 cleanup_label = p->cleanup_label;
473 return_label = p->return_label;
474 save_expr_regs = p->save_expr_regs;
475 stack_slot_list = p->stack_slot_list;
476 parm_birth_insn = p->parm_birth_insn;
477 frame_offset = p->frame_offset;
478 tail_recursion_label = p->tail_recursion_label;
479 tail_recursion_reentry = p->tail_recursion_reentry;
480 arg_pointer_save_area = p->arg_pointer_save_area;
481 rtl_expr_chain = p->rtl_expr_chain;
482 last_parm_insn = p->last_parm_insn;
483 context_display = p->context_display;
484 trampoline_list = p->trampoline_list;
485 function_call_count = p->function_call_count;
486 temp_slots = p->temp_slots;
487 temp_slot_level = p->temp_slot_level;
488 current_function_epilogue_delay_list = p->epilogue_delay_list;
489
490 restore_tree_status (p);
491 restore_storage_status (p);
492 restore_expr_status (p);
493 restore_emit_status (p);
494 restore_stmt_status (p);
495 restore_varasm_status (p);
496
497 /* Finish doing put_var_into_stack for any of our variables
498 which became addressable during the nested function. */
499 {
500 struct var_refs_queue *queue = p->fixup_var_refs_queue;
501 for (; queue; queue = queue->next)
502 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
503 }
504
505 free (p);
506
507 /* Reset variables that have known state during rtx generation. */
508 rtx_equal_function_value_matters = 1;
509 virtuals_instantiated = 0;
510 }
511 \f
512 /* Allocate fixed slots in the stack frame of the current function. */
513
514 /* Return size needed for stack frame based on slots so far allocated.
515 This size counts from zero. It is not rounded to STACK_BOUNDARY;
516 the caller may have to do that. */
517
518 int
519 get_frame_size ()
520 {
521 #ifdef FRAME_GROWS_DOWNWARD
522 return -frame_offset;
523 #else
524 return frame_offset;
525 #endif
526 }
527
528 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
529 with machine mode MODE.
530
531 ALIGN controls the amount of alignment for the address of the slot:
532 0 means according to MODE,
533 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
534 positive specifies alignment boundary in bits.
535
536 We do not round to stack_boundary here. */
537
538 rtx
539 assign_stack_local (mode, size, align)
540 enum machine_mode mode;
541 int size;
542 int align;
543 {
544 register rtx x, addr;
545 int bigend_correction = 0;
546 int alignment;
547
548 if (align == 0)
549 {
550 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
551 if (mode == BLKmode)
552 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
553 }
554 else if (align == -1)
555 {
556 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
557 size = CEIL_ROUND (size, alignment);
558 }
559 else
560 alignment = align / BITS_PER_UNIT;
561
562 /* Round frame offset to that alignment.
563 We must be careful here, since FRAME_OFFSET might be negative and
564 division with a negative dividend isn't as well defined as we might
565 like. So we instead assume that ALIGNMENT is a power of two and
566 use logical operations which are unambiguous. */
567 #ifdef FRAME_GROWS_DOWNWARD
568 frame_offset = FLOOR_ROUND (frame_offset, alignment);
569 #else
570 frame_offset = CEIL_ROUND (frame_offset, alignment);
571 #endif
572
573 /* On a big-endian machine, if we are allocating more space than we will use,
574 use the least significant bytes of those that are allocated. */
575 #if BYTES_BIG_ENDIAN
576 if (mode != BLKmode)
577 bigend_correction = size - GET_MODE_SIZE (mode);
578 #endif
579
580 #ifdef FRAME_GROWS_DOWNWARD
581 frame_offset -= size;
582 #endif
583
584 /* If we have already instantiated virtual registers, return the actual
585 address relative to the frame pointer. */
586 if (virtuals_instantiated)
587 addr = plus_constant (frame_pointer_rtx,
588 (frame_offset + bigend_correction
589 + STARTING_FRAME_OFFSET));
590 else
591 addr = plus_constant (virtual_stack_vars_rtx,
592 frame_offset + bigend_correction);
593
594 #ifndef FRAME_GROWS_DOWNWARD
595 frame_offset += size;
596 #endif
597
598 x = gen_rtx (MEM, mode, addr);
599
600 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
601
602 return x;
603 }
604
605 /* Assign a stack slot in a containing function.
606 First three arguments are same as in preceding function.
607 The last argument specifies the function to allocate in. */
608
609 rtx
610 assign_outer_stack_local (mode, size, align, function)
611 enum machine_mode mode;
612 int size;
613 int align;
614 struct function *function;
615 {
616 register rtx x, addr;
617 int bigend_correction = 0;
618 int alignment;
619
620 /* Allocate in the memory associated with the function in whose frame
621 we are assigning. */
622 push_obstacks (function->function_obstack,
623 function->function_maybepermanent_obstack);
624
625 if (align == 0)
626 {
627 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
628 if (mode == BLKmode)
629 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
630 }
631 else if (align == -1)
632 {
633 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
634 size = CEIL_ROUND (size, alignment);
635 }
636 else
637 alignment = align / BITS_PER_UNIT;
638
639 /* Round frame offset to that alignment. */
640 #ifdef FRAME_GROWS_DOWNWARD
641 frame_offset = FLOOR_ROUND (frame_offset, alignment);
642 #else
643 frame_offset = CEIL_ROUND (frame_offset, alignment);
644 #endif
645
646 /* On a big-endian machine, if we are allocating more space than we will use,
647 use the least significant bytes of those that are allocated. */
648 #if BYTES_BIG_ENDIAN
649 if (mode != BLKmode)
650 bigend_correction = size - GET_MODE_SIZE (mode);
651 #endif
652
653 #ifdef FRAME_GROWS_DOWNWARD
654 function->frame_offset -= size;
655 #endif
656 addr = plus_constant (virtual_stack_vars_rtx,
657 function->frame_offset + bigend_correction);
658 #ifndef FRAME_GROWS_DOWNWARD
659 function->frame_offset += size;
660 #endif
661
662 x = gen_rtx (MEM, mode, addr);
663
664 function->stack_slot_list
665 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
666
667 pop_obstacks ();
668
669 return x;
670 }
671 \f
672 /* Allocate a temporary stack slot and record it for possible later
673 reuse.
674
675 MODE is the machine mode to be given to the returned rtx.
676
677 SIZE is the size in units of the space required. We do no rounding here
678 since assign_stack_local will do any required rounding.
679
680 KEEP is non-zero if this slot is to be retained after a call to
681 free_temp_slots. Automatic variables for a block are allocated with this
682 flag. */
683
684 rtx
685 assign_stack_temp (mode, size, keep)
686 enum machine_mode mode;
687 int size;
688 int keep;
689 {
690 struct temp_slot *p, *best_p = 0;
691
692 /* First try to find an available, already-allocated temporary that is the
693 exact size we require. */
694 for (p = temp_slots; p; p = p->next)
695 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
696 break;
697
698 /* If we didn't find, one, try one that is larger than what we want. We
699 find the smallest such. */
700 if (p == 0)
701 for (p = temp_slots; p; p = p->next)
702 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
703 && (best_p == 0 || best_p->size > p->size))
704 best_p = p;
705
706 /* Make our best, if any, the one to use. */
707 if (best_p)
708 p = best_p;
709
710 /* If we still didn't find one, make a new temporary. */
711 if (p == 0)
712 {
713 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
714 p->size = size;
715 /* If the temp slot mode doesn't indicate the alignment,
716 use the largest possible, so no one will be disappointed. */
717 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
718 p->next = temp_slots;
719 temp_slots = p;
720 }
721
722 p->in_use = 1;
723 p->level = temp_slot_level;
724 p->keep = keep;
725 return p->slot;
726 }
727 \f
728 /* If X could be a reference to a temporary slot, mark that slot as belonging
729 to the to one level higher. If X matched one of our slots, just mark that
730 one. Otherwise, we can't easily predict which it is, so upgrade all of
731 them. Kept slots need not be touched.
732
733 This is called when an ({...}) construct occurs and a statement
734 returns a value in memory. */
735
736 void
737 preserve_temp_slots (x)
738 rtx x;
739 {
740 struct temp_slot *p;
741
742 /* If X is not in memory or is at a constant address, it cannot be in
743 a temporary slot. */
744 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
745 return;
746
747 /* First see if we can find a match. */
748 for (p = temp_slots; p; p = p->next)
749 if (p->in_use && x == p->slot)
750 {
751 p->level--;
752 return;
753 }
754
755 /* Otherwise, preserve all non-kept slots at this level. */
756 for (p = temp_slots; p; p = p->next)
757 if (p->in_use && p->level == temp_slot_level && ! p->keep)
758 p->level--;
759 }
760
761 /* Free all temporaries used so far. This is normally called at the end
762 of generating code for a statement. */
763
764 void
765 free_temp_slots ()
766 {
767 struct temp_slot *p;
768
769 for (p = temp_slots; p; p = p->next)
770 if (p->in_use && p->level == temp_slot_level && ! p->keep)
771 p->in_use = 0;
772 }
773
774 /* Push deeper into the nesting level for stack temporaries. */
775
776 void
777 push_temp_slots ()
778 {
779 /* For GNU C++, we must allow a sequence to be emitted anywhere in
780 the level where the sequence was started. By not changing levels
781 when the compiler is inside a sequence, the temporaries for the
782 sequence and the temporaries will not unwittingly conflict with
783 the temporaries for other sequences and/or code at that level. */
784 if (in_sequence_p ())
785 return;
786
787 temp_slot_level++;
788 }
789
790 /* Pop a temporary nesting level. All slots in use in the current level
791 are freed. */
792
793 void
794 pop_temp_slots ()
795 {
796 struct temp_slot *p;
797
798 /* See comment in push_temp_slots about why we don't change levels
799 in sequences. */
800 if (in_sequence_p ())
801 return;
802
803 for (p = temp_slots; p; p = p->next)
804 if (p->in_use && p->level == temp_slot_level)
805 p->in_use = 0;
806
807 temp_slot_level--;
808 }
809 \f
810 /* Retroactively move an auto variable from a register to a stack slot.
811 This is done when an address-reference to the variable is seen. */
812
813 void
814 put_var_into_stack (decl)
815 tree decl;
816 {
817 register rtx reg;
818 register rtx new = 0;
819 enum machine_mode promoted_mode, decl_mode;
820 struct function *function = 0;
821 tree context = decl_function_context (decl);
822
823 /* Get the current rtl used for this object and it's original mode. */
824 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
825
826 /* No need to do anything if decl has no rtx yet
827 since in that case caller is setting TREE_ADDRESSABLE
828 and a stack slot will be assigned when the rtl is made. */
829 if (reg == 0)
830 return;
831
832 /* Get the declared mode for this object. */
833 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
834 : DECL_MODE (decl));
835 /* Get the mode it's actually stored in. */
836 promoted_mode = GET_MODE (reg);
837
838 /* If this variable comes from an outer function,
839 find that function's saved context. */
840 if (context != current_function_decl)
841 for (function = outer_function_chain; function; function = function->next)
842 if (function->decl == context)
843 break;
844
845 /* If this is a variable-size object with a pseudo to address it,
846 put that pseudo into the stack, if the var is nonlocal. */
847 if (DECL_NONLOCAL (decl)
848 && GET_CODE (reg) == MEM
849 && GET_CODE (XEXP (reg, 0)) == REG
850 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
851 reg = XEXP (reg, 0);
852 if (GET_CODE (reg) != REG)
853 return;
854
855 if (function)
856 {
857 if (REGNO (reg) < function->max_parm_reg)
858 new = function->parm_reg_stack_loc[REGNO (reg)];
859 if (new == 0)
860 new = assign_outer_stack_local (GET_MODE (reg),
861 GET_MODE_SIZE (decl_mode),
862 0, function);
863 }
864 else
865 {
866 if (REGNO (reg) < max_parm_reg)
867 new = parm_reg_stack_loc[REGNO (reg)];
868 if (new == 0)
869 new = assign_stack_local (GET_MODE (reg),
870 GET_MODE_SIZE (decl_mode), 0);
871 }
872
873 XEXP (reg, 0) = XEXP (new, 0);
874 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
875 REG_USERVAR_P (reg) = 0;
876 PUT_CODE (reg, MEM);
877 PUT_MODE (reg, decl_mode);
878
879 /* If this is a memory ref that contains aggregate components,
880 mark it as such for cse and loop optimize. */
881 MEM_IN_STRUCT_P (reg)
882 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
883 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
884 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
885
886 /* Now make sure that all refs to the variable, previously made
887 when it was a register, are fixed up to be valid again. */
888 if (function)
889 {
890 struct var_refs_queue *temp;
891
892 /* Variable is inherited; fix it up when we get back to its function. */
893 push_obstacks (function->function_obstack,
894 function->function_maybepermanent_obstack);
895 temp
896 = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
897 temp->modified = reg;
898 temp->promoted_mode = promoted_mode;
899 temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl));
900 temp->next = function->fixup_var_refs_queue;
901 function->fixup_var_refs_queue = temp;
902 pop_obstacks ();
903 }
904 else
905 /* Variable is local; fix it up now. */
906 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl)));
907 }
908 \f
909 static void
910 fixup_var_refs (var, promoted_mode, unsignedp)
911 rtx var;
912 enum machine_mode promoted_mode;
913 int unsignedp;
914 {
915 tree pending;
916 rtx first_insn = get_insns ();
917 struct sequence_stack *stack = sequence_stack;
918 tree rtl_exps = rtl_expr_chain;
919
920 /* Must scan all insns for stack-refs that exceed the limit. */
921 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
922
923 /* Scan all pending sequences too. */
924 for (; stack; stack = stack->next)
925 {
926 push_to_sequence (stack->first);
927 fixup_var_refs_insns (var, promoted_mode, unsignedp,
928 stack->first, stack->next != 0);
929 /* Update remembered end of sequence
930 in case we added an insn at the end. */
931 stack->last = get_last_insn ();
932 end_sequence ();
933 }
934
935 /* Scan all waiting RTL_EXPRs too. */
936 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
937 {
938 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
939 if (seq != const0_rtx && seq != 0)
940 {
941 push_to_sequence (seq);
942 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
943 end_sequence ();
944 }
945 }
946 }
947 \f
948 /* This structure is used by the following two functions to record MEMs or
949 pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
950 VAR as an address. We need to maintain this list in case two operands of
951 an insn were required to match; in that case we must ensure we use the
952 same replacement. */
953
954 struct fixup_replacement
955 {
956 rtx old;
957 rtx new;
958 struct fixup_replacement *next;
959 };
960
961 /* REPLACEMENTS is a pointer to a list of the above structures and X is
962 some part of an insn. Return a struct fixup_replacement whose OLD
963 value is equal to X. Allocate a new structure if no such entry exists. */
964
965 static struct fixup_replacement *
966 find_replacement (replacements, x)
967 struct fixup_replacement **replacements;
968 rtx x;
969 {
970 struct fixup_replacement *p;
971
972 /* See if we have already replaced this. */
973 for (p = *replacements; p && p->old != x; p = p->next)
974 ;
975
976 if (p == 0)
977 {
978 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
979 p->old = x;
980 p->new = 0;
981 p->next = *replacements;
982 *replacements = p;
983 }
984
985 return p;
986 }
987
988 /* Scan the insn-chain starting with INSN for refs to VAR
989 and fix them up. TOPLEVEL is nonzero if this chain is the
990 main chain of insns for the current function. */
991
992 static void
993 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
994 rtx var;
995 enum machine_mode promoted_mode;
996 int unsignedp;
997 rtx insn;
998 int toplevel;
999 {
1000 while (insn)
1001 {
1002 rtx next = NEXT_INSN (insn);
1003 rtx note;
1004 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1005 || GET_CODE (insn) == JUMP_INSN)
1006 {
1007 /* The insn to load VAR from a home in the arglist
1008 is now a no-op. When we see it, just delete it. */
1009 if (toplevel
1010 && GET_CODE (PATTERN (insn)) == SET
1011 && SET_DEST (PATTERN (insn)) == var
1012 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1013 {
1014 next = delete_insn (insn);
1015 if (insn == last_parm_insn)
1016 last_parm_insn = PREV_INSN (next);
1017 }
1018 else
1019 {
1020 /* See if we have to do anything to INSN now that VAR is in
1021 memory. If it needs to be loaded into a pseudo, use a single
1022 pseudo for the entire insn in case there is a MATCH_DUP
1023 between two operands. We pass a pointer to the head of
1024 a list of struct fixup_replacements. If fixup_var_refs_1
1025 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1026 it will record them in this list.
1027
1028 If it allocated a pseudo for any replacement, we copy into
1029 it here. */
1030
1031 struct fixup_replacement *replacements = 0;
1032
1033 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1034 &replacements);
1035
1036 while (replacements)
1037 {
1038 if (GET_CODE (replacements->new) == REG)
1039 {
1040 rtx insert_before;
1041 rtx seq;
1042
1043 /* OLD might be a (subreg (mem)). */
1044 if (GET_CODE (replacements->old) == SUBREG)
1045 replacements->old
1046 = fixup_memory_subreg (replacements->old, insn, 0);
1047 else
1048 replacements->old
1049 = fixup_stack_1 (replacements->old, insn);
1050
1051 /* We can not separate USE insns from the CALL_INSN
1052 that they belong to. If this is a CALL_INSN, insert
1053 the move insn before the USE insns preceding it
1054 instead of immediately before the insn. */
1055 if (GET_CODE (insn) == CALL_INSN)
1056 {
1057 insert_before = insn;
1058 while (GET_CODE (PREV_INSN (insert_before)) == INSN
1059 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
1060 insert_before = PREV_INSN (insert_before);
1061 }
1062 else
1063 insert_before = insn;
1064
1065 /* If we are changing the mode, do a conversion.
1066 This might be wasteful, but combine.c will
1067 eliminate much of the waste. */
1068
1069 if (GET_MODE (replacements->new)
1070 != GET_MODE (replacements->old))
1071 {
1072 start_sequence ();
1073 convert_move (replacements->new,
1074 replacements->old, unsignedp);
1075 seq = gen_sequence ();
1076 end_sequence ();
1077 }
1078 else
1079 seq = gen_move_insn (replacements->new,
1080 replacements->old);
1081
1082 emit_insn_before (seq, insert_before);
1083 }
1084
1085 replacements = replacements->next;
1086 }
1087 }
1088
1089 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1090 But don't touch other insns referred to by reg-notes;
1091 we will get them elsewhere. */
1092 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1093 if (GET_CODE (note) != INSN_LIST)
1094 XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
1095 }
1096 insn = next;
1097 }
1098 }
1099 \f
1100 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1101 See if the rtx expression at *LOC in INSN needs to be changed.
1102
1103 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1104 contain a list of original rtx's and replacements. If we find that we need
1105 to modify this insn by replacing a memory reference with a pseudo or by
1106 making a new MEM to implement a SUBREG, we consult that list to see if
1107 we have already chosen a replacement. If none has already been allocated,
1108 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1109 or the SUBREG, as appropriate, to the pseudo. */
1110
1111 static void
1112 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1113 register rtx var;
1114 enum machine_mode promoted_mode;
1115 register rtx *loc;
1116 rtx insn;
1117 struct fixup_replacement **replacements;
1118 {
1119 register int i;
1120 register rtx x = *loc;
1121 RTX_CODE code = GET_CODE (x);
1122 register char *fmt;
1123 register rtx tem, tem1;
1124 struct fixup_replacement *replacement;
1125
1126 switch (code)
1127 {
1128 case MEM:
1129 if (var == x)
1130 {
1131 /* If we already have a replacement, use it. Otherwise,
1132 try to fix up this address in case it is invalid. */
1133
1134 replacement = find_replacement (replacements, var);
1135 if (replacement->new)
1136 {
1137 *loc = replacement->new;
1138 return;
1139 }
1140
1141 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1142
1143 /* Unless we are forcing memory to register or we changed the mode,
1144 we can leave things the way they are if the insn is valid. */
1145
1146 INSN_CODE (insn) = -1;
1147 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1148 && recog_memoized (insn) >= 0)
1149 return;
1150
1151 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1152 return;
1153 }
1154
1155 /* If X contains VAR, we need to unshare it here so that we update
1156 each occurrence separately. But all identical MEMs in one insn
1157 must be replaced with the same rtx because of the possibility of
1158 MATCH_DUPs. */
1159
1160 if (reg_mentioned_p (var, x))
1161 {
1162 replacement = find_replacement (replacements, x);
1163 if (replacement->new == 0)
1164 replacement->new = copy_most_rtx (x, var);
1165
1166 *loc = x = replacement->new;
1167 }
1168 break;
1169
1170 case REG:
1171 case CC0:
1172 case PC:
1173 case CONST_INT:
1174 case CONST:
1175 case SYMBOL_REF:
1176 case LABEL_REF:
1177 case CONST_DOUBLE:
1178 return;
1179
1180 case SIGN_EXTRACT:
1181 case ZERO_EXTRACT:
1182 /* Note that in some cases those types of expressions are altered
1183 by optimize_bit_field, and do not survive to get here. */
1184 if (XEXP (x, 0) == var
1185 || (GET_CODE (XEXP (x, 0)) == SUBREG
1186 && SUBREG_REG (XEXP (x, 0)) == var))
1187 {
1188 /* Get TEM as a valid MEM in the mode presently in the insn.
1189
1190 We don't worry about the possibility of MATCH_DUP here; it
1191 is highly unlikely and would be tricky to handle. */
1192
1193 tem = XEXP (x, 0);
1194 if (GET_CODE (tem) == SUBREG)
1195 tem = fixup_memory_subreg (tem, insn, 1);
1196 tem = fixup_stack_1 (tem, insn);
1197
1198 /* Unless we want to load from memory, get TEM into the proper mode
1199 for an extract from memory. This can only be done if the
1200 extract is at a constant position and length. */
1201
1202 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1203 && GET_CODE (XEXP (x, 2)) == CONST_INT
1204 && ! mode_dependent_address_p (XEXP (tem, 0))
1205 && ! MEM_VOLATILE_P (tem))
1206 {
1207 enum machine_mode wanted_mode = VOIDmode;
1208 enum machine_mode is_mode = GET_MODE (tem);
1209 int width = INTVAL (XEXP (x, 1));
1210 int pos = INTVAL (XEXP (x, 2));
1211
1212 #ifdef HAVE_extzv
1213 if (GET_CODE (x) == ZERO_EXTRACT)
1214 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1215 #endif
1216 #ifdef HAVE_extv
1217 if (GET_CODE (x) == SIGN_EXTRACT)
1218 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1219 #endif
1220 /* If we have a narrower mode, we can do something. */
1221 if (wanted_mode != VOIDmode
1222 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1223 {
1224 int offset = pos / BITS_PER_UNIT;
1225 rtx old_pos = XEXP (x, 2);
1226 rtx newmem;
1227
1228 /* If the bytes and bits are counted differently, we
1229 must adjust the offset. */
1230 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1231 offset = (GET_MODE_SIZE (is_mode)
1232 - GET_MODE_SIZE (wanted_mode) - offset);
1233 #endif
1234
1235 pos %= GET_MODE_BITSIZE (wanted_mode);
1236
1237 newmem = gen_rtx (MEM, wanted_mode,
1238 plus_constant (XEXP (tem, 0), offset));
1239 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1240 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1241 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1242
1243 /* Make the change and see if the insn remains valid. */
1244 INSN_CODE (insn) = -1;
1245 XEXP (x, 0) = newmem;
1246 XEXP (x, 2) = GEN_INT (pos);
1247
1248 if (recog_memoized (insn) >= 0)
1249 return;
1250
1251 /* Otherwise, restore old position. XEXP (x, 0) will be
1252 restored later. */
1253 XEXP (x, 2) = old_pos;
1254 }
1255 }
1256
1257 /* If we get here, the bitfield extract insn can't accept a memory
1258 reference. Copy the input into a register. */
1259
1260 tem1 = gen_reg_rtx (GET_MODE (tem));
1261 emit_insn_before (gen_move_insn (tem1, tem), insn);
1262 XEXP (x, 0) = tem1;
1263 return;
1264 }
1265 break;
1266
1267 case SUBREG:
1268 if (SUBREG_REG (x) == var)
1269 {
1270 /* If this is a special SUBREG made because VAR was promoted
1271 from a wider mode, replace it with VAR and call ourself
1272 recursively, this time saying that the object previously
1273 had its current mode (by virtue of the SUBREG). */
1274
1275 if (SUBREG_PROMOTED_VAR_P (x))
1276 {
1277 *loc = var;
1278 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1279 return;
1280 }
1281
1282 /* If this SUBREG makes VAR wider, it has become a paradoxical
1283 SUBREG with VAR in memory, but these aren't allowed at this
1284 stage of the compilation. So load VAR into a pseudo and take
1285 a SUBREG of that pseudo. */
1286 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1287 {
1288 replacement = find_replacement (replacements, var);
1289 if (replacement->new == 0)
1290 replacement->new = gen_reg_rtx (GET_MODE (var));
1291 SUBREG_REG (x) = replacement->new;
1292 return;
1293 }
1294
1295 /* See if we have already found a replacement for this SUBREG.
1296 If so, use it. Otherwise, make a MEM and see if the insn
1297 is recognized. If not, or if we should force MEM into a register,
1298 make a pseudo for this SUBREG. */
1299 replacement = find_replacement (replacements, x);
1300 if (replacement->new)
1301 {
1302 *loc = replacement->new;
1303 return;
1304 }
1305
1306 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1307
1308 if (! flag_force_mem && recog_memoized (insn) >= 0)
1309 return;
1310
1311 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1312 return;
1313 }
1314 break;
1315
1316 case SET:
1317 /* First do special simplification of bit-field references. */
1318 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1319 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1320 optimize_bit_field (x, insn, 0);
1321 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1322 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1323 optimize_bit_field (x, insn, NULL_PTR);
1324
1325 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1326 insn into a pseudo and store the low part of the pseudo into VAR. */
1327 if (GET_CODE (SET_DEST (x)) == SUBREG
1328 && SUBREG_REG (SET_DEST (x)) == var
1329 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1330 > GET_MODE_SIZE (GET_MODE (var))))
1331 {
1332 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1333 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1334 tem)),
1335 insn);
1336 break;
1337 }
1338
1339 {
1340 rtx dest = SET_DEST (x);
1341 rtx src = SET_SRC (x);
1342 rtx outerdest = dest;
1343
1344 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1345 || GET_CODE (dest) == SIGN_EXTRACT
1346 || GET_CODE (dest) == ZERO_EXTRACT)
1347 dest = XEXP (dest, 0);
1348
1349 if (GET_CODE (src) == SUBREG)
1350 src = XEXP (src, 0);
1351
1352 /* If VAR does not appear at the top level of the SET
1353 just scan the lower levels of the tree. */
1354
1355 if (src != var && dest != var)
1356 break;
1357
1358 /* We will need to rerecognize this insn. */
1359 INSN_CODE (insn) = -1;
1360
1361 #ifdef HAVE_insv
1362 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1363 {
1364 /* Since this case will return, ensure we fixup all the
1365 operands here. */
1366 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1367 insn, replacements);
1368 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1369 insn, replacements);
1370 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1371 insn, replacements);
1372
1373 tem = XEXP (outerdest, 0);
1374
1375 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1376 that may appear inside a ZERO_EXTRACT.
1377 This was legitimate when the MEM was a REG. */
1378 if (GET_CODE (tem) == SUBREG
1379 && SUBREG_REG (tem) == var)
1380 tem = fixup_memory_subreg (tem, insn, 1);
1381 else
1382 tem = fixup_stack_1 (tem, insn);
1383
1384 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1385 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1386 && ! mode_dependent_address_p (XEXP (tem, 0))
1387 && ! MEM_VOLATILE_P (tem))
1388 {
1389 enum machine_mode wanted_mode
1390 = insn_operand_mode[(int) CODE_FOR_insv][0];
1391 enum machine_mode is_mode = GET_MODE (tem);
1392 int width = INTVAL (XEXP (outerdest, 1));
1393 int pos = INTVAL (XEXP (outerdest, 2));
1394
1395 /* If we have a narrower mode, we can do something. */
1396 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1397 {
1398 int offset = pos / BITS_PER_UNIT;
1399 rtx old_pos = XEXP (outerdest, 2);
1400 rtx newmem;
1401
1402 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1403 offset = (GET_MODE_SIZE (is_mode)
1404 - GET_MODE_SIZE (wanted_mode) - offset);
1405 #endif
1406
1407 pos %= GET_MODE_BITSIZE (wanted_mode);
1408
1409 newmem = gen_rtx (MEM, wanted_mode,
1410 plus_constant (XEXP (tem, 0), offset));
1411 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1412 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1413 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1414
1415 /* Make the change and see if the insn remains valid. */
1416 INSN_CODE (insn) = -1;
1417 XEXP (outerdest, 0) = newmem;
1418 XEXP (outerdest, 2) = GEN_INT (pos);
1419
1420 if (recog_memoized (insn) >= 0)
1421 return;
1422
1423 /* Otherwise, restore old position. XEXP (x, 0) will be
1424 restored later. */
1425 XEXP (outerdest, 2) = old_pos;
1426 }
1427 }
1428
1429 /* If we get here, the bit-field store doesn't allow memory
1430 or isn't located at a constant position. Load the value into
1431 a register, do the store, and put it back into memory. */
1432
1433 tem1 = gen_reg_rtx (GET_MODE (tem));
1434 emit_insn_before (gen_move_insn (tem1, tem), insn);
1435 emit_insn_after (gen_move_insn (tem, tem1), insn);
1436 XEXP (outerdest, 0) = tem1;
1437 return;
1438 }
1439 #endif
1440
1441 /* STRICT_LOW_PART is a no-op on memory references
1442 and it can cause combinations to be unrecognizable,
1443 so eliminate it. */
1444
1445 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
1446 SET_DEST (x) = XEXP (SET_DEST (x), 0);
1447
1448 /* A valid insn to copy VAR into or out of a register
1449 must be left alone, to avoid an infinite loop here.
1450 If the reference to VAR is by a subreg, fix that up,
1451 since SUBREG is not valid for a memref.
1452 Also fix up the address of the stack slot. */
1453
1454 if ((SET_SRC (x) == var
1455 || (GET_CODE (SET_SRC (x)) == SUBREG
1456 && SUBREG_REG (SET_SRC (x)) == var))
1457 && (GET_CODE (SET_DEST (x)) == REG
1458 || (GET_CODE (SET_DEST (x)) == SUBREG
1459 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1460 && recog_memoized (insn) >= 0)
1461 {
1462 replacement = find_replacement (replacements, SET_SRC (x));
1463 if (replacement->new)
1464 {
1465 SET_SRC (x) = replacement->new;
1466 return;
1467 }
1468 else if (GET_CODE (SET_SRC (x)) == SUBREG)
1469 SET_SRC (x) = replacement->new
1470 = fixup_memory_subreg (SET_SRC (x), insn, 0);
1471 else
1472 SET_SRC (x) = replacement->new
1473 = fixup_stack_1 (SET_SRC (x), insn);
1474 return;
1475 }
1476
1477 if ((SET_DEST (x) == var
1478 || (GET_CODE (SET_DEST (x)) == SUBREG
1479 && SUBREG_REG (SET_DEST (x)) == var))
1480 && (GET_CODE (SET_SRC (x)) == REG
1481 || (GET_CODE (SET_SRC (x)) == SUBREG
1482 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1483 && recog_memoized (insn) >= 0)
1484 {
1485 if (GET_CODE (SET_DEST (x)) == SUBREG)
1486 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
1487 else
1488 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
1489 return;
1490 }
1491
1492 /* Otherwise, storing into VAR must be handled specially
1493 by storing into a temporary and copying that into VAR
1494 with a new insn after this one. Note that this case
1495 will be used when storing into a promoted scalar since
1496 the insn will now have different modes on the input
1497 and output and hence will be invalid (except for the case
1498 of setting it to a constant, which does not need any
1499 change if it is valid). We generate extra code in that case,
1500 but combine.c will eliminate it. */
1501
1502 if (dest == var)
1503 {
1504 rtx temp;
1505 rtx fixeddest = SET_DEST (x);
1506
1507 /* STRICT_LOW_PART can be discarded, around a MEM. */
1508 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
1509 fixeddest = XEXP (fixeddest, 0);
1510 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
1511 if (GET_CODE (fixeddest) == SUBREG)
1512 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
1513 else
1514 fixeddest = fixup_stack_1 (fixeddest, insn);
1515
1516 temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
1517 ? GET_MODE (fixeddest)
1518 : GET_MODE (SET_SRC (x)));
1519
1520 emit_insn_after (gen_move_insn (fixeddest,
1521 gen_lowpart (GET_MODE (fixeddest),
1522 temp)),
1523 insn);
1524
1525 SET_DEST (x) = temp;
1526 }
1527 }
1528 }
1529
1530 /* Nothing special about this RTX; fix its operands. */
1531
1532 fmt = GET_RTX_FORMAT (code);
1533 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1534 {
1535 if (fmt[i] == 'e')
1536 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
1537 if (fmt[i] == 'E')
1538 {
1539 register int j;
1540 for (j = 0; j < XVECLEN (x, i); j++)
1541 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
1542 insn, replacements);
1543 }
1544 }
1545 }
1546 \f
1547 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1548 return an rtx (MEM:m1 newaddr) which is equivalent.
1549 If any insns must be emitted to compute NEWADDR, put them before INSN.
1550
1551 UNCRITICAL nonzero means accept paradoxical subregs.
1552 This is used for subregs found inside of ZERO_EXTRACTs. */
1553
1554 static rtx
1555 fixup_memory_subreg (x, insn, uncritical)
1556 rtx x;
1557 rtx insn;
1558 int uncritical;
1559 {
1560 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1561 rtx addr = XEXP (SUBREG_REG (x), 0);
1562 enum machine_mode mode = GET_MODE (x);
1563 rtx saved, result;
1564
1565 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1566 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1567 && ! uncritical)
1568 abort ();
1569
1570 #if BYTES_BIG_ENDIAN
1571 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1572 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1573 #endif
1574 addr = plus_constant (addr, offset);
1575 if (!flag_force_addr && memory_address_p (mode, addr))
1576 /* Shortcut if no insns need be emitted. */
1577 return change_address (SUBREG_REG (x), mode, addr);
1578 start_sequence ();
1579 result = change_address (SUBREG_REG (x), mode, addr);
1580 emit_insn_before (gen_sequence (), insn);
1581 end_sequence ();
1582 return result;
1583 }
1584
1585 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1586 Replace subexpressions of X in place.
1587 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1588 Otherwise return X, with its contents possibly altered.
1589
1590 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1591
1592 static rtx
1593 walk_fixup_memory_subreg (x, insn)
1594 register rtx x;
1595 rtx insn;
1596 {
1597 register enum rtx_code code;
1598 register char *fmt;
1599 register int i;
1600
1601 if (x == 0)
1602 return 0;
1603
1604 code = GET_CODE (x);
1605
1606 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1607 return fixup_memory_subreg (x, insn, 0);
1608
1609 /* Nothing special about this RTX; fix its operands. */
1610
1611 fmt = GET_RTX_FORMAT (code);
1612 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1613 {
1614 if (fmt[i] == 'e')
1615 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1616 if (fmt[i] == 'E')
1617 {
1618 register int j;
1619 for (j = 0; j < XVECLEN (x, i); j++)
1620 XVECEXP (x, i, j)
1621 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1622 }
1623 }
1624 return x;
1625 }
1626 \f
1627 #if 0
1628 /* Fix up any references to stack slots that are invalid memory addresses
1629 because they exceed the maximum range of a displacement. */
1630
1631 void
1632 fixup_stack_slots ()
1633 {
1634 register rtx insn;
1635
1636 /* Did we generate a stack slot that is out of range
1637 or otherwise has an invalid address? */
1638 if (invalid_stack_slot)
1639 {
1640 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
1641 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1642 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1643 || GET_CODE (insn) == JUMP_INSN)
1644 fixup_stack_1 (PATTERN (insn), insn);
1645 }
1646 }
1647 #endif
1648
1649 /* For each memory ref within X, if it refers to a stack slot
1650 with an out of range displacement, put the address in a temp register
1651 (emitting new insns before INSN to load these registers)
1652 and alter the memory ref to use that register.
1653 Replace each such MEM rtx with a copy, to avoid clobberage. */
1654
1655 static rtx
1656 fixup_stack_1 (x, insn)
1657 rtx x;
1658 rtx insn;
1659 {
1660 register int i;
1661 register RTX_CODE code = GET_CODE (x);
1662 register char *fmt;
1663
1664 if (code == MEM)
1665 {
1666 register rtx ad = XEXP (x, 0);
1667 /* If we have address of a stack slot but it's not valid
1668 (displacement is too large), compute the sum in a register. */
1669 if (GET_CODE (ad) == PLUS
1670 && GET_CODE (XEXP (ad, 0)) == REG
1671 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1672 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1673 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1674 {
1675 rtx temp, seq;
1676 if (memory_address_p (GET_MODE (x), ad))
1677 return x;
1678
1679 start_sequence ();
1680 temp = copy_to_reg (ad);
1681 seq = gen_sequence ();
1682 end_sequence ();
1683 emit_insn_before (seq, insn);
1684 return change_address (x, VOIDmode, temp);
1685 }
1686 return x;
1687 }
1688
1689 fmt = GET_RTX_FORMAT (code);
1690 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1691 {
1692 if (fmt[i] == 'e')
1693 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1694 if (fmt[i] == 'E')
1695 {
1696 register int j;
1697 for (j = 0; j < XVECLEN (x, i); j++)
1698 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1699 }
1700 }
1701 return x;
1702 }
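/* Sketch of the case handled above (displacement limit assumed): on a
 machine whose address displacements are limited to 13 bits, a frame
 reference like

 (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 20000)))

 is invalid, so fixup_stack_1 emits, before INSN,

 (set (reg:SI temp) (plus:SI (reg:SI virtual-stack-vars)
 (const_int 20000)))

 and returns a fresh (mem:SI (reg:SI temp)); the MEM is copied so that
 other users of a shared rtx are not clobbered. */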
1703 \f
1704 /* Optimization: a bit-field instruction whose field
1705 happens to be a byte or halfword in memory
1706 can be changed to a move instruction.
1707
1708 We call here when INSN is an insn to examine or store into a bit-field.
1709 BODY is the SET-rtx to be altered.
1710
1711 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1712 (Currently this is called only from function.c, and EQUIV_MEM
1713 is always 0.) */
1714
1715 static void
1716 optimize_bit_field (body, insn, equiv_mem)
1717 rtx body;
1718 rtx insn;
1719 rtx *equiv_mem;
1720 {
1721 register rtx bitfield;
1722 int destflag;
1723 rtx seq = 0;
1724 enum machine_mode mode;
1725
1726 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1727 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1728 bitfield = SET_DEST (body), destflag = 1;
1729 else
1730 bitfield = SET_SRC (body), destflag = 0;
1731
1732 /* First check that the field being stored has constant size and position
1733 and is in fact a byte or halfword suitably aligned. */
1734
1735 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1736 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1737 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1738 != BLKmode)
1739 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1740 {
1741 register rtx memref = 0;
1742
1743 /* Now check that the containing word is memory, not a register,
1744 and that it is safe to change the machine mode. */
1745
1746 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1747 memref = XEXP (bitfield, 0);
1748 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1749 && equiv_mem != 0)
1750 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1751 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1752 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1753 memref = SUBREG_REG (XEXP (bitfield, 0));
1754 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1755 && equiv_mem != 0
1756 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1757 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1758
1759 if (memref
1760 && ! mode_dependent_address_p (XEXP (memref, 0))
1761 && ! MEM_VOLATILE_P (memref))
1762 {
1763 /* Now adjust the address, first for any subreg'ing
1764 that we are now getting rid of,
1765 and then for which byte of the word is wanted. */
1766
1767 register int offset = INTVAL (XEXP (bitfield, 2));
1768 /* Adjust OFFSET to count bits from low-address byte. */
1769 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1770 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1771 - offset - INTVAL (XEXP (bitfield, 1)));
1772 #endif
1773 /* Adjust OFFSET to count bytes from low-address byte. */
1774 offset /= BITS_PER_UNIT;
1775 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1776 {
1777 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1778 #if BYTES_BIG_ENDIAN
1779 offset -= (MIN (UNITS_PER_WORD,
1780 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1781 - MIN (UNITS_PER_WORD,
1782 GET_MODE_SIZE (GET_MODE (memref))));
1783 #endif
1784 }
1785
1786 memref = change_address (memref, mode,
1787 plus_constant (XEXP (memref, 0), offset));
1788
1789 /* Store this memory reference where
1790 we found the bit field reference. */
1791
1792 if (destflag)
1793 {
1794 validate_change (insn, &SET_DEST (body), memref, 1);
1795 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1796 {
1797 rtx src = SET_SRC (body);
1798 while (GET_CODE (src) == SUBREG
1799 && SUBREG_WORD (src) == 0)
1800 src = SUBREG_REG (src);
1801 if (GET_MODE (src) != GET_MODE (memref))
1802 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1803 validate_change (insn, &SET_SRC (body), src, 1);
1804 }
1805 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1806 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1807 /* This shouldn't happen because anything that didn't have
1808 one of these modes should have got converted explicitly
1809 and then referenced through a subreg.
1810 This is so because the original bit-field was
1811 handled by agg_mode and so its tree structure had
1812 the same mode that memref now has. */
1813 abort ();
1814 }
1815 else
1816 {
1817 rtx dest = SET_DEST (body);
1818
1819 while (GET_CODE (dest) == SUBREG
1820 && SUBREG_WORD (dest) == 0)
1821 dest = SUBREG_REG (dest);
1822
1823 validate_change (insn, &SET_DEST (body), dest, 1);
1824
1825 if (GET_MODE (dest) == GET_MODE (memref))
1826 validate_change (insn, &SET_SRC (body), memref, 1);
1827 else
1828 {
1829 /* Convert the mem ref to the destination mode. */
1830 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1831
1832 start_sequence ();
1833 convert_move (newreg, memref,
1834 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1835 seq = get_insns ();
1836 end_sequence ();
1837
1838 validate_change (insn, &SET_SRC (body), newreg, 1);
1839 }
1840 }
1841
1842 /* See if we can convert this extraction or insertion into
1843 a simple move insn. We might not be able to do so if this
1844 was, for example, part of a PARALLEL.
1845
1846 If we succeed, write out any needed conversions. If we fail,
1847 it is hard to guess why we failed, so don't do anything
1848 special; just let the optimization be suppressed. */
1849
1850 if (apply_change_group () && seq)
1851 emit_insns_before (seq, insn);
1852 }
1853 }
1854 }
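/* Worked example of the transformation above (little-endian layout
 assumed): a store into an aligned 8-bit field at bit offset 8,

 (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
 (reg:SI r))

 passes the size/position tests (mode QImode, byte offset 8/8 = 1),
 so it becomes the plain byte store

 (set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI r) 0))

 with the source narrowed via gen_lowpart as coded above. */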
1855 \f
1856 /* These routines are responsible for converting virtual register references
1857 to the actual hard register references once RTL generation is complete.
1858
1859 The following four variables are used for communication between the
1860 routines. They contain the offsets of the virtual registers from their
1861 respective hard registers. */
1862
1863 static int in_arg_offset;
1864 static int var_offset;
1865 static int dynamic_offset;
1866 static int out_arg_offset;
1867
1868 /* In most machines, the stack pointer register is equivalent to the bottom
1869 of the stack. */
1870
1871 #ifndef STACK_POINTER_OFFSET
1872 #define STACK_POINTER_OFFSET 0
1873 #endif
1874
1875 /* If not defined, pick an appropriate default for the offset of dynamically
1876 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1877 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1878
1879 #ifndef STACK_DYNAMIC_OFFSET
1880
1881 #ifdef ACCUMULATE_OUTGOING_ARGS
1882 /* The bottom of the stack points to the actual arguments. If
1883 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1884 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1885 stack space for register parameters is not pushed by the caller, but
1886 rather part of the fixed stack areas and hence not included in
1887 `current_function_outgoing_args_size'. Nevertheless, we must allow
1888 for it when allocating stack dynamic objects. */
1889
1890 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1891 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1892 (current_function_outgoing_args_size \
1893 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1894
1895 #else
1896 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1897 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1898 #endif
1899
1900 #else
1901 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1902 #endif
1903 #endif
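/* Illustrative arithmetic for the default above (all values assumed):
 with ACCUMULATE_OUTGOING_ARGS, 24 bytes of outgoing args,
 REG_PARM_STACK_SPACE of 16, and STACK_POINTER_OFFSET 0,
 STACK_DYNAMIC_OFFSET comes to 24 + 16 = 40, placing dynamically
 allocated storage just past both fixed areas above the stack
 pointer. */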
1904
1905 /* Pass through the INSNS of function FNDECL and convert virtual register
1906 references to hard register references. */
1907
1908 void
1909 instantiate_virtual_regs (fndecl, insns)
1910 tree fndecl;
1911 rtx insns;
1912 {
1913 rtx insn;
1914
1915 /* Compute the offsets to use for this function. */
1916 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1917 var_offset = STARTING_FRAME_OFFSET;
1918 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1919 out_arg_offset = STACK_POINTER_OFFSET;
1920
1921 /* Scan all variables and parameters of this function. For each that is
1922 in memory, instantiate all virtual registers if the result is a valid
1923 address. If not, we do it later. That will handle most uses of virtual
1924 regs on many machines. */
1925 instantiate_decls (fndecl, 1);
1926
1927 /* Initialize recognition, indicating that volatile is OK. */
1928 init_recog ();
1929
1930 /* Scan through all the insns, instantiating every virtual register still
1931 present. */
1932 for (insn = insns; insn; insn = NEXT_INSN (insn))
1933 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1934 || GET_CODE (insn) == CALL_INSN)
1935 {
1936 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1937 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1938 }
1939
1940 /* Now instantiate the remaining register equivalences for debugging info.
1941 These will not be valid addresses. */
1942 instantiate_decls (fndecl, 0);
1943
1944 /* Indicate that, from now on, assign_stack_local should use
1945 frame_pointer_rtx. */
1946 virtuals_instantiated = 1;
1947 }
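/* For illustration (offset value assumed): if var_offset were -16,
 instantiation would rewrite

 (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 4)))

 into

 (mem:SI (plus:SI (reg:SI fp) (const_int -12)))

 substituting frame_pointer_rtx and folding the two constants, which
 is the PLUS case of instantiate_virtual_regs_1 below. */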
1948
1949 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1950 all virtual registers in their DECL_RTL's.
1951
1952 If VALID_ONLY, do this only if the resulting address is still valid.
1953 Otherwise, always do it. */
1954
1955 static void
1956 instantiate_decls (fndecl, valid_only)
1957 tree fndecl;
1958 int valid_only;
1959 {
1960 tree decl;
1961
1962 if (DECL_INLINE (fndecl))
1963 /* When compiling an inline function, the obstack used for
1964 rtl allocation is the maybepermanent_obstack. Calling
1965 `resume_temporary_allocation' switches us back to that
1966 obstack while we process this function's parameters. */
1967 resume_temporary_allocation ();
1968
1969 /* Process all parameters of the function. */
1970 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1971 {
1972 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
1973 valid_only);
1974 instantiate_decl (DECL_INCOMING_RTL (decl),
1975 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
1976 }
1977
1978 /* Now process all variables defined in the function or its subblocks. */
1979 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1980
1981 if (DECL_INLINE (fndecl))
1982 {
1983 /* Save all rtl allocated for this function by raising the
1984 high-water mark on the maybepermanent_obstack. */
1985 preserve_data ();
1986 /* All further rtl allocation is now done in the current_obstack. */
1987 rtl_in_current_obstack ();
1988 }
1989 }
1990
1991 /* Subroutine of instantiate_decls: Process all decls in the given
1992 BLOCK node and all its subblocks. */
1993
1994 static void
1995 instantiate_decls_1 (let, valid_only)
1996 tree let;
1997 int valid_only;
1998 {
1999 tree t;
2000
2001 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2002 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2003 valid_only);
2004
2005 /* Process all subblocks. */
2006 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2007 instantiate_decls_1 (t, valid_only);
2008 }
2009
2010 /* Subroutine of the preceding procedures: Given RTL representing a
2011 decl and the size of the object, do any instantiation required.
2012
2013 If VALID_ONLY is non-zero, it means that the RTL should only be
2014 changed if the new address is valid. */
2015
2016 static void
2017 instantiate_decl (x, size, valid_only)
2018 rtx x;
2019 int size;
2020 int valid_only;
2021 {
2022 enum machine_mode mode;
2023 rtx addr;
2024
2025 /* If this is not a MEM, no need to do anything. Similarly if the
2026 address is a constant or a register that is not a virtual register. */
2027
2028 if (x == 0 || GET_CODE (x) != MEM)
2029 return;
2030
2031 addr = XEXP (x, 0);
2032 if (CONSTANT_P (addr)
2033 || (GET_CODE (addr) == REG
2034 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2035 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2036 return;
2037
2038 /* If we should only do this if the address is valid, copy the address.
2039 We need to do this so we can undo any changes that might make the
2040 address invalid. This copy is unfortunate, but probably can't be
2041 avoided. */
2042
2043 if (valid_only)
2044 addr = copy_rtx (addr);
2045
2046 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2047
2048 if (! valid_only)
2049 return;
2050
2051 /* Now verify that the resulting address is valid for every integer or
2052 floating-point mode up to and including SIZE bytes long. We do this
2053 since the object might be accessed in any mode and frame addresses
2054 are shared. */
2055
2056 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2057 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2058 mode = GET_MODE_WIDER_MODE (mode))
2059 if (! memory_address_p (mode, addr))
2060 return;
2061
2062 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2063 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2064 mode = GET_MODE_WIDER_MODE (mode))
2065 if (! memory_address_p (mode, addr))
2066 return;
2067
2068 /* Otherwise, put back the address, now that we have updated it and we
2069 know it is valid. */
2070
2071 XEXP (x, 0) = addr;
2072 }
2073 \f
2074 /* Given a pointer to a piece of rtx and an optional pointer to the
2075 containing object, instantiate any virtual registers present in it.
2076
2077 If EXTRA_INSNS, we always do the replacement and generate
2078 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2079 is not valid.
2080
2081 Return 1 if we either had nothing to do or if we were able to do the
2082 needed replacement. Return 0 otherwise; we only return zero if
2083 EXTRA_INSNS is zero.
2084
2085 We first try some simple transformations to avoid the creation of extra
2086 pseudos. */
2087
2088 static int
2089 instantiate_virtual_regs_1 (loc, object, extra_insns)
2090 rtx *loc;
2091 rtx object;
2092 int extra_insns;
2093 {
2094 rtx x;
2095 RTX_CODE code;
2096 rtx new = 0;
2097 int offset;
2098 rtx temp;
2099 rtx seq;
2100 int i, j;
2101 char *fmt;
2102
2103 /* Re-start here to avoid recursion in common cases. */
2104 restart:
2105
2106 x = *loc;
2107 if (x == 0)
2108 return 1;
2109
2110 code = GET_CODE (x);
2111
2112 /* Check for some special cases. */
2113 switch (code)
2114 {
2115 case CONST_INT:
2116 case CONST_DOUBLE:
2117 case CONST:
2118 case SYMBOL_REF:
2119 case CODE_LABEL:
2120 case PC:
2121 case CC0:
2122 case ASM_INPUT:
2123 case ADDR_VEC:
2124 case ADDR_DIFF_VEC:
2125 case RETURN:
2126 return 1;
2127
2128 case SET:
2129 /* We are allowed to set the virtual registers. This means that
2130 the actual register should receive the source minus the
2131 appropriate offset. This is used, for example, in the handling
2132 of non-local gotos. */
2133 if (SET_DEST (x) == virtual_incoming_args_rtx)
2134 new = arg_pointer_rtx, offset = - in_arg_offset;
2135 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2136 new = frame_pointer_rtx, offset = - var_offset;
2137 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2138 new = stack_pointer_rtx, offset = - dynamic_offset;
2139 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2140 new = stack_pointer_rtx, offset = - out_arg_offset;
2141
2142 if (new)
2143 {
2144 /* The only valid sources here are PLUS or REG. Just do
2145 the simplest possible thing to handle them. */
2146 if (GET_CODE (SET_SRC (x)) != REG
2147 && GET_CODE (SET_SRC (x)) != PLUS)
2148 abort ();
2149
2150 start_sequence ();
2151 if (GET_CODE (SET_SRC (x)) != REG)
2152 temp = force_operand (SET_SRC (x), NULL_RTX);
2153 else
2154 temp = SET_SRC (x);
2155 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2156 seq = get_insns ();
2157 end_sequence ();
2158
2159 emit_insns_before (seq, object);
2160 SET_DEST (x) = new;
2161
2162 if (!validate_change (object, &SET_SRC (x), temp, 0)
2163 || ! extra_insns)
2164 abort ();
2165
2166 return 1;
2167 }
2168
2169 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2170 loc = &SET_SRC (x);
2171 goto restart;
2172
2173 case PLUS:
2174 /* Handle special case of virtual register plus constant. */
2175 if (CONSTANT_P (XEXP (x, 1)))
2176 {
2177 rtx old;
2178
2179 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2180 if (GET_CODE (XEXP (x, 0)) == PLUS)
2181 {
2182 rtx inner = XEXP (XEXP (x, 0), 0);
2183
2184 if (inner == virtual_incoming_args_rtx)
2185 new = arg_pointer_rtx, offset = in_arg_offset;
2186 else if (inner == virtual_stack_vars_rtx)
2187 new = frame_pointer_rtx, offset = var_offset;
2188 else if (inner == virtual_stack_dynamic_rtx)
2189 new = stack_pointer_rtx, offset = dynamic_offset;
2190 else if (inner == virtual_outgoing_args_rtx)
2191 new = stack_pointer_rtx, offset = out_arg_offset;
2192 else
2193 {
2194 loc = &XEXP (x, 0);
2195 goto restart;
2196 }
2197
2198 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2199 extra_insns);
2200 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2201 }
2202
2203 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2204 new = arg_pointer_rtx, offset = in_arg_offset;
2205 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2206 new = frame_pointer_rtx, offset = var_offset;
2207 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2208 new = stack_pointer_rtx, offset = dynamic_offset;
2209 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2210 new = stack_pointer_rtx, offset = out_arg_offset;
2211 else
2212 {
2213 /* We know the second operand is a constant. Unless the
2214 first operand is a REG (which has already been checked),
2215 it needs to be checked. */
2216 if (GET_CODE (XEXP (x, 0)) != REG)
2217 {
2218 loc = &XEXP (x, 0);
2219 goto restart;
2220 }
2221 return 1;
2222 }
2223
2224 old = XEXP (x, 0);
2225 XEXP (x, 0) = new;
2226 new = plus_constant (XEXP (x, 1), offset);
2227
2228 /* If the new constant is zero, try to replace the sum with its
2229 first operand. */
2230 if (new == const0_rtx
2231 && validate_change (object, loc, XEXP (x, 0), 0))
2232 return 1;
2233
2234 /* Next try to replace the constant with the new one. */
2235 if (!validate_change (object, &XEXP (x, 1), new, 0))
2236 {
2237 if (! extra_insns)
2238 {
2239 XEXP (x, 0) = old;
2240 return 0;
2241 }
2242
2243 /* Otherwise copy the new constant into a register and replace
2244 the constant with that register. */
2245 temp = gen_reg_rtx (Pmode);
2246 if (validate_change (object, &XEXP (x, 1), temp, 0))
2247 emit_insn_before (gen_move_insn (temp, new), object);
2248 else
2249 {
2250 /* If that didn't work, replace this expression with a
2251 register containing the sum. */
2252
2253 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2254 XEXP (x, 0) = old;
2255
2256 start_sequence ();
2257 temp = force_operand (new, NULL_RTX);
2258 seq = get_insns ();
2259 end_sequence ();
2260
2261 emit_insns_before (seq, object);
2262 if (! validate_change (object, loc, temp, 0)
2263 && ! validate_replace_rtx (x, temp, object))
2264 abort ();
2265 }
2266 }
2267
2268 return 1;
2269 }
2270
2271 /* Fall through to generic two-operand expression case. */
2272 case EXPR_LIST:
2273 case CALL:
2274 case COMPARE:
2275 case MINUS:
2276 case MULT:
2277 case DIV: case UDIV:
2278 case MOD: case UMOD:
2279 case AND: case IOR: case XOR:
2280 case LSHIFT: case ASHIFT: case ROTATE:
2281 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2282 case NE: case EQ:
2283 case GE: case GT: case GEU: case GTU:
2284 case LE: case LT: case LEU: case LTU:
2285 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2286 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2287 loc = &XEXP (x, 0);
2288 goto restart;
2289
2290 case MEM:
2291 /* Most cases of MEM that convert to valid addresses have already been
2292 handled by our scan of regno_reg_rtx. The only special handling we
2293 need here is to make a copy of the rtx to ensure it isn't being
2294 shared if we have to change it to a pseudo.
2295
2296 If the rtx is a simple reference to an address via a virtual register,
2297 it can potentially be shared. In such cases, first try to make it
2298 a valid address, which can also be shared. Otherwise, copy it and
2299 proceed normally.
2300
2301 First check for common cases that need no processing. These are
2302 usually due to instantiation already being done on a previous instance
2303 of a shared rtx. */
2304
2305 temp = XEXP (x, 0);
2306 if (CONSTANT_ADDRESS_P (temp)
2307 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2308 || temp == arg_pointer_rtx
2309 #endif
2310 || temp == frame_pointer_rtx)
2311 return 1;
2312
2313 if (GET_CODE (temp) == PLUS
2314 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2315 && (XEXP (temp, 0) == frame_pointer_rtx
2316 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2317 || XEXP (temp, 0) == arg_pointer_rtx
2318 #endif
2319 ))
2320 return 1;
2321
2322 if (temp == virtual_stack_vars_rtx
2323 || temp == virtual_incoming_args_rtx
2324 || (GET_CODE (temp) == PLUS
2325 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2326 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2327 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2328 {
2329 /* This MEM may be shared. If the substitution can be done without
2330 the need to generate new pseudos, we want to do it in place
2331 so all copies of the shared rtx benefit. The call below will
2332 only make substitutions if the resulting address is still
2333 valid.
2334
2335 Note that we cannot pass X as the object in the recursive call
2336 since the insn being processed may not allow all valid
2337 addresses. However, if we were not passed an object, we can
2338 only modify X without copying it if X will have a valid
2339 address.
2340
2341 ??? Also note that this can still lose if OBJECT is an insn that
2342 has fewer restrictions on an address than some other insn.
2343 In that case, we will modify the shared address. This case
2344 doesn't seem very likely, though. */
2345
2346 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2347 object ? object : x, 0))
2348 return 1;
2349
2350 /* Otherwise make a copy and process that copy. We copy the entire
2351 RTL expression since it might be a PLUS which could also be
2352 shared. */
2353 *loc = x = copy_rtx (x);
2354 }
2355
2356 /* Fall through to generic unary operation case. */
2357 case USE:
2358 case CLOBBER:
2359 case SUBREG:
2360 case STRICT_LOW_PART:
2361 case NEG: case NOT:
2362 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2363 case SIGN_EXTEND: case ZERO_EXTEND:
2364 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2365 case FLOAT: case FIX:
2366 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2367 case ABS:
2368 case SQRT:
2369 case FFS:
2370 /* These cases either have just one operand or we know that we need not
2371 check the rest of the operands. */
2372 loc = &XEXP (x, 0);
2373 goto restart;
2374
2375 case REG:
2376 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2377 in front of this insn and substitute the temporary. */
2378 if (x == virtual_incoming_args_rtx)
2379 new = arg_pointer_rtx, offset = in_arg_offset;
2380 else if (x == virtual_stack_vars_rtx)
2381 new = frame_pointer_rtx, offset = var_offset;
2382 else if (x == virtual_stack_dynamic_rtx)
2383 new = stack_pointer_rtx, offset = dynamic_offset;
2384 else if (x == virtual_outgoing_args_rtx)
2385 new = stack_pointer_rtx, offset = out_arg_offset;
2386
2387 if (new)
2388 {
2389 temp = plus_constant (new, offset);
2390 if (!validate_change (object, loc, temp, 0))
2391 {
2392 if (! extra_insns)
2393 return 0;
2394
2395 start_sequence ();
2396 temp = force_operand (temp, NULL_RTX);
2397 seq = get_insns ();
2398 end_sequence ();
2399
2400 emit_insns_before (seq, object);
2401 if (! validate_change (object, loc, temp, 0)
2402 && ! validate_replace_rtx (x, temp, object))
2403 abort ();
2404 }
2405 }
2406
2407 return 1;
2408 }
2409
2410 /* Scan all subexpressions. */
2411 fmt = GET_RTX_FORMAT (code);
2412 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2413 if (*fmt == 'e')
2414 {
2415 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2416 return 0;
2417 }
2418 else if (*fmt == 'E')
2419 for (j = 0; j < XVECLEN (x, i); j++)
2420 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2421 extra_insns))
2422 return 0;
2423
2424 return 1;
2425 }
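/* Illustration of the REG case above: a bare
 (reg:SI virtual-incoming-args) is first replaced in place by
 (plus:SI (reg:SI ap) (const_int in_arg_offset)); only if the insn
 rejects a PLUS there do we compute the sum into a new pseudo ahead
 of the insn and substitute the pseudo instead. */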
2426 \f
2427 /* Optimization: assuming this function does not receive nonlocal gotos,
2428 delete the handlers for such, as well as the insns to establish
2429 and disestablish them. */
2430
2431 static void
2432 delete_handlers ()
2433 {
2434 rtx insn;
2435 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2436 {
2437 /* Delete the handler by turning off the flag that would
2438 prevent jump_optimize from deleting it.
2439 Also permit deletion of the nonlocal labels themselves
2440 if nothing local refers to them. */
2441 if (GET_CODE (insn) == CODE_LABEL)
2442 LABEL_PRESERVE_P (insn) = 0;
2443 if (GET_CODE (insn) == INSN
2444 && ((nonlocal_goto_handler_slot != 0
2445 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2446 || (nonlocal_goto_stack_level != 0
2447 && reg_mentioned_p (nonlocal_goto_stack_level,
2448 PATTERN (insn)))))
2449 delete_insn (insn);
2450 }
2451 }
2452
2453 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2454 of the current function. */
2455
2456 rtx
2457 nonlocal_label_rtx_list ()
2458 {
2459 tree t;
2460 rtx x = 0;
2461
2462 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2463 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2464
2465 return x;
2466 }
2467 \f
2468 /* Output a USE for any register use in RTL.
2469 This is used with -noreg to mark the extent of lifespan
2470 of any registers used in a user-visible variable's DECL_RTL. */
2471
2472 void
2473 use_variable (rtl)
2474 rtx rtl;
2475 {
2476 if (GET_CODE (rtl) == REG)
2477 /* This is a register variable. */
2478 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2479 else if (GET_CODE (rtl) == MEM
2480 && GET_CODE (XEXP (rtl, 0)) == REG
2481 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2482 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2483 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2484 /* This is a variable-sized structure. */
2485 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2486 }
2487
2488 /* Like use_variable except that it outputs the USEs after INSN
2489 instead of at the end of the insn-chain. */
2490
2491 void
2492 use_variable_after (rtl, insn)
2493 rtx rtl, insn;
2494 {
2495 if (GET_CODE (rtl) == REG)
2496 /* This is a register variable. */
2497 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2498 else if (GET_CODE (rtl) == MEM
2499 && GET_CODE (XEXP (rtl, 0)) == REG
2500 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2501 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2502 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2503 /* This is a variable-sized structure. */
2504 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2505 }
2506 \f
2507 int
2508 max_parm_reg_num ()
2509 {
2510 return max_parm_reg;
2511 }
2512
2513 /* Return the first insn following those generated by `assign_parms'. */
2514
2515 rtx
2516 get_first_nonparm_insn ()
2517 {
2518 if (last_parm_insn)
2519 return NEXT_INSN (last_parm_insn);
2520 return get_insns ();
2521 }
2522
2523 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
2524 Crash if there is none. */
2525
2526 rtx
2527 get_first_block_beg ()
2528 {
2529 register rtx searcher;
2530 register rtx insn = get_first_nonparm_insn ();
2531
2532 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
2533 if (GET_CODE (searcher) == NOTE
2534 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
2535 return searcher;
2536
2537 abort (); /* Invalid call to this function. (See comments above.) */
2538 return NULL_RTX;
2539 }
2540
2541 /* Return 1 if EXP returns an aggregate value, for which an address
2542 must be passed to the function or returned by the function. */
2543
2544 int
2545 aggregate_value_p (exp)
2546 tree exp;
2547 {
2548 int i, regno, nregs;
2549 rtx reg;
2550 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2551 return 1;
2552 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2553 return 1;
2554 if (flag_pcc_struct_return
2555 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2556 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2557 return 1;
2558 /* Make sure we have suitable call-clobbered regs to return
2559 the value in; if not, we must return it in memory. */
2560 reg = hard_function_value (TREE_TYPE (exp), 0);
2561 regno = REGNO (reg);
2562 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp)));
2563 for (i = 0; i < nregs; i++)
2564 if (! call_used_regs[regno + i])
2565 return 1;
2566 return 0;
2567 }
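/* Example (assuming a target where this struct gets BLKmode, or where
 RETURN_IN_MEMORY says so): for

 struct pair { int a, b; };
 struct pair f ();

 aggregate_value_p returns 1, so callers pass f the address of a
 temporary in which to store the result rather than expecting it in
 the value registers. */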
2568 \f
2569 /* Assign RTL expressions to the function's parameters.
2570 This may involve copying them into registers and using
2571 those registers as the RTL for them.
2572
2573 If SECOND_TIME is non-zero it means that this function is being
2574 called a second time. This is done by integrate.c when a function's
2575 compilation is deferred. We need to come back here in case the
2576 FUNCTION_ARG macro computes items needed for the rest of the compilation
2577 (such as changing which registers are fixed or caller-saved). But suppress
2578 writing any insns or setting DECL_RTL of anything in this case. */
2579
2580 void
2581 assign_parms (fndecl, second_time)
2582 tree fndecl;
2583 int second_time;
2584 {
2585 register tree parm;
2586 register rtx entry_parm = 0;
2587 register rtx stack_parm = 0;
2588 CUMULATIVE_ARGS args_so_far;
2589 enum machine_mode promoted_mode, passed_mode, nominal_mode;
2590 int unsignedp;
2591 /* Total space needed so far for args on the stack,
2592 given as a constant and a tree-expression. */
2593 struct args_size stack_args_size;
2594 tree fntype = TREE_TYPE (fndecl);
2595 tree fnargs = DECL_ARGUMENTS (fndecl);
2596 /* This is used for the arg pointer when referring to stack args. */
2597 rtx internal_arg_pointer;
2598 /* This is a dummy PARM_DECL that we used for the function result if
2599 the function returns a structure. */
2600 tree function_result_decl = 0;
2601 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2602 int varargs_setup = 0;
2603
2604 /* Nonzero if the last arg is named `__builtin_va_alist',
2605 which is used on some machines for old-fashioned non-ANSI varargs.h;
2606 this should be stuck onto the stack as if it had arrived there. */
2607 int vararg
2608 = (fnargs
2609 && (parm = tree_last (fnargs)) != 0
2610 && DECL_NAME (parm)
2611 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2612 "__builtin_va_alist")));
2613
2614 /* Nonzero if function takes extra anonymous args.
2615 This means the last named arg must be on the stack
2616 right before the anonymous ones. */
2617 int stdarg
2618 = (TYPE_ARG_TYPES (fntype) != 0
2619 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2620 != void_type_node));
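/* The two variadic styles distinguished above, for illustration:
 old-style varargs.h functions are declared

 int f (va_alist) va_dcl

 where va_alist expands to __builtin_va_alist (detected by name,
 setting VARARG), while ANSI stdarg.h functions,

 int g (int first, ...);

 show up as TYPE_ARG_TYPES not terminated by void_type_node
 (setting STDARG). */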
2621
2622 /* If the reg that the virtual arg pointer will be translated into is
2623 not a fixed reg or is the stack pointer, make a copy of the virtual
2624 arg pointer, and address parms via the copy. The frame pointer is
2625 considered fixed even though it is not marked as such.
2626
2627 The second time through, simply use ap to avoid generating rtx. */
2628
2629 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2630 || ! (fixed_regs[ARG_POINTER_REGNUM]
2631 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2632 && ! second_time)
2633 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2634 else
2635 internal_arg_pointer = virtual_incoming_args_rtx;
2636 current_function_internal_arg_pointer = internal_arg_pointer;
2637
2638 stack_args_size.constant = 0;
2639 stack_args_size.var = 0;
2640
2641 /* If struct value address is treated as the first argument, make it so. */
2642 if (aggregate_value_p (DECL_RESULT (fndecl))
2643 && ! current_function_returns_pcc_struct
2644 && struct_value_incoming_rtx == 0)
2645 {
2646 tree type = build_pointer_type (fntype);
2647
2648 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2649
2650 DECL_ARG_TYPE (function_result_decl) = type;
2651 TREE_CHAIN (function_result_decl) = fnargs;
2652 fnargs = function_result_decl;
2653 }
2654
2655 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2656 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2657
2658 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2659 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
2660 #else
2661 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2662 #endif
2663
2664 /* We haven't yet found an argument that we must push and pretend the
2665 caller did. */
2666 current_function_pretend_args_size = 0;
2667
2668 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2669 {
2670 int aggregate
2671 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2672 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2673 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2674 struct args_size stack_offset;
2675 struct args_size arg_size;
2676 int passed_pointer = 0;
2677 tree passed_type = DECL_ARG_TYPE (parm);
2678
2679 /* Set LAST_NAMED if this is last named arg before some
2680 anonymous args. We treat it as if it were anonymous too. */
2681 int last_named = ((TREE_CHAIN (parm) == 0
2682 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2683 && (vararg || stdarg));
2684
2685 if (TREE_TYPE (parm) == error_mark_node
2686 /* This can happen after weird syntax errors
2687 or if an enum type is defined among the parms. */
2688 || TREE_CODE (parm) != PARM_DECL
2689 || passed_type == NULL)
2690 {
2691 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
2692 const0_rtx);
2693 TREE_USED (parm) = 1;
2694 continue;
2695 }
2696
2697 /* For a varargs.h function, save info about regs and stack space
2698 used by the individual args, not including the va_alist arg. */
2699 if (vararg && last_named)
2700 current_function_args_info = args_so_far;
2701
2702 /* Find mode of arg as it is passed, and mode of arg
2703 as it should be during execution of this function. */
2704 passed_mode = TYPE_MODE (passed_type);
2705 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2706
2707 /* If the parm's mode is VOID, its value doesn't matter,
2708 so avoid the usual things like emit_move_insn that could crash. */
2709 if (nominal_mode == VOIDmode)
2710 {
2711 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2712 continue;
2713 }
2714
2715 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2716 /* See if this arg was passed by invisible reference. */
2717 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2718 passed_type, ! last_named))
2719 {
2720 passed_type = build_pointer_type (passed_type);
2721 passed_pointer = 1;
2722 passed_mode = nominal_mode = Pmode;
2723 }
2724 #endif
2725
2726 promoted_mode = passed_mode;
2727
2728 #ifdef PROMOTE_FUNCTION_ARGS
2729 /* Compute the mode to which the arg is actually extended. */
2730 if (TREE_CODE (passed_type) == INTEGER_TYPE
2731 || TREE_CODE (passed_type) == ENUMERAL_TYPE
2732 || TREE_CODE (passed_type) == BOOLEAN_TYPE
2733 || TREE_CODE (passed_type) == CHAR_TYPE
2734 || TREE_CODE (passed_type) == REAL_TYPE
2735 || TREE_CODE (passed_type) == POINTER_TYPE
2736 || TREE_CODE (passed_type) == OFFSET_TYPE)
2737 {
2738 unsignedp = TREE_UNSIGNED (passed_type);
2739 PROMOTE_MODE (promoted_mode, unsignedp, passed_type);
2740 }
2741 #endif
2742
2743 /* Let machine desc say which reg (if any) the parm arrives in.
2744 0 means it arrives on the stack. */
2745 #ifdef FUNCTION_INCOMING_ARG
2746 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
2747 passed_type, ! last_named);
2748 #else
2749 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
2750 passed_type, ! last_named);
2751 #endif
2752
2753 if (entry_parm)
2754 passed_mode = promoted_mode;
2755
2756 #ifdef SETUP_INCOMING_VARARGS
2757 /* If this is the last named parameter, do any required setup for
2758 varargs or stdargs. We need to know about the case of this being an
2759 addressable type, in which case we skip the registers it
2760 would have arrived in.
2761
2762 For stdargs, LAST_NAMED will be set for two parameters, the one that
2763 is actually the last named, and the dummy parameter. We only
2764 want to do this action once.
2765
2766 Also, indicate when RTL generation is to be suppressed. */
2767 if (last_named && !varargs_setup)
2768 {
2769 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2770 current_function_pretend_args_size,
2771 second_time);
2772 varargs_setup = 1;
2773 }
2774 #endif
2775
2776 /* Determine parm's home in the stack,
2777 in case it arrives in the stack or we should pretend it did.
2778
2779 Compute the stack position and rtx where the argument arrives
2780 and its size.
2781
2782 There is one complexity here: If this was a parameter that would
2783 have been passed in registers, but wasn't only because it is
2784 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2785 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2786 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2787 0 as it was the previous time. */
2788
2789 locate_and_pad_parm (passed_mode, passed_type,
2790 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2791 1,
2792 #else
2793 #ifdef FUNCTION_INCOMING_ARG
2794 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2795 passed_type,
2796 (! last_named
2797 || varargs_setup)) != 0,
2798 #else
2799 FUNCTION_ARG (args_so_far, passed_mode,
2800 passed_type,
2801 ! last_named || varargs_setup) != 0,
2802 #endif
2803 #endif
2804 fndecl, &stack_args_size, &stack_offset, &arg_size);
2805
2806 if (! second_time)
2807 {
2808 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2809
2810 if (offset_rtx == const0_rtx)
2811 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2812 else
2813 stack_parm = gen_rtx (MEM, passed_mode,
2814 gen_rtx (PLUS, Pmode,
2815 internal_arg_pointer, offset_rtx));
2816
2817 /* If this is a memory ref that contains aggregate components,
2818 mark it as such for cse and loop optimize. */
2819 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2820 }
2821
2822 /* If this parameter was passed both in registers and in the stack,
2823 use the copy on the stack. */
2824 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2825 entry_parm = 0;
2826
2827 /* If this parm was passed part in regs and part in memory,
2828 pretend it arrived entirely in memory
2829 by pushing the register-part onto the stack.
2830
2831 In the special case of a DImode or DFmode that is split,
2832 we could put it together in a pseudoreg directly,
2833 but for now that's not worth bothering with. */
2834
2835 if (entry_parm)
2836 {
2837 int nregs = 0;
2838 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2839 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2840 passed_type, ! last_named);
2841 #endif
2842
2843 if (nregs > 0)
2844 {
2845 current_function_pretend_args_size
2846 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2847 / (PARM_BOUNDARY / BITS_PER_UNIT)
2848 * (PARM_BOUNDARY / BITS_PER_UNIT));
2849
2850 if (! second_time)
2851 move_block_from_reg (REGNO (entry_parm),
2852 validize_mem (stack_parm), nregs);
2853 entry_parm = stack_parm;
2854 }
2855 }
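/* Illustrative numbers for the partial-register case just above: if a
 3-word argument arrives with 2 words in registers and 1 on the
 stack, and PARM_BOUNDARY equals the word size, the 2 register words
 are stored into the parm's stack home and
 current_function_pretend_args_size is set to 2 words, after which
 the whole argument is addressed as if the caller had pushed it. */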
2856
2857 /* If we didn't decide this parm came in a register,
2858 by default it came on the stack. */
2859 if (entry_parm == 0)
2860 entry_parm = stack_parm;
2861
2862 /* Record permanently how this parm was passed. */
2863 if (! second_time)
2864 DECL_INCOMING_RTL (parm) = entry_parm;
2865
2866 /* If there is actually space on the stack for this parm,
2867 count it in stack_args_size; otherwise set stack_parm to 0
2868 to indicate there is no preallocated stack slot for the parm. */
2869
2870 if (entry_parm == stack_parm
2871 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
2872 /* On some machines, even if a parm value arrives in a register
2873 there is still an (uninitialized) stack slot allocated for it.
2874
2875 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2876 whether this parameter already has a stack slot allocated,
2877 because an arg block exists only if current_function_args_size
2878 is larger than some threshold, and we haven't calculated that
2879 yet. So, for now, we just assume that stack slots never exist
2880 in this case. */
2881 || REG_PARM_STACK_SPACE (fndecl) > 0
2882 #endif
2883 )
2884 {
2885 stack_args_size.constant += arg_size.constant;
2886 if (arg_size.var)
2887 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2888 }
2889 else
2890 /* No stack slot was pushed for this parm. */
2891 stack_parm = 0;
2892
2893 /* Update info on where next arg arrives in registers. */
2894
2895 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2896 passed_type, ! last_named);
2897
2898 /* If this is our second time through, we are done with this parm. */
2899 if (second_time)
2900 continue;
2901
2902 /* If we can't trust the parm stack slot to be aligned enough
2903 for its ultimate type, don't use that slot after entry.
2904 We'll make another stack slot, if we need one. */
2905 {
2906 #ifdef FUNCTION_ARG_BOUNDARY
2907 int thisparm_boundary
2908 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2909 #else
2910 int thisparm_boundary = PARM_BOUNDARY;
2911 #endif
2912
2913 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2914 stack_parm = 0;
2915 }
2916
2917 /* Now adjust STACK_PARM to the mode and precise location
2918 where this parameter should live during execution,
2919 if we discover that it must live in the stack during execution.
2920 To make debuggers happier on big-endian machines, we store
2921 the value in the last bytes of the space available. */
2922
2923 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2924 && stack_parm != 0)
2925 {
2926 rtx offset_rtx;
2927
2928 #if BYTES_BIG_ENDIAN
2929 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2930 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2931 - GET_MODE_SIZE (nominal_mode));
2932 #endif
2933
2934 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2935 if (offset_rtx == const0_rtx)
2936 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2937 else
2938 stack_parm = gen_rtx (MEM, nominal_mode,
2939 gen_rtx (PLUS, Pmode,
2940 internal_arg_pointer, offset_rtx));
2941
2942 /* If this is a memory ref that contains aggregate components,
2943 mark it as such for cse and loop optimize. */
2944 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2945 }
2946
2947 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2948 in the mode in which it arrives.
2949 STACK_PARM is an RTX for a stack slot where the parameter can live
2950 during the function (in case we want to put it there).
2951 STACK_PARM is 0 if no stack slot was pushed for it.
2952
2953 Now output code if necessary to convert ENTRY_PARM to
2954 the type in which this function declares it,
2955 and store that result in an appropriate place,
2956 which may be a pseudo reg, may be STACK_PARM,
2957 or may be a local stack slot if STACK_PARM is 0.
2958
2959 Set DECL_RTL to that place. */
2960
2961 if (nominal_mode == BLKmode)
2962 {
2963 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2964 if (GET_CODE (entry_parm) == REG)
2965 {
2966 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2967 UNITS_PER_WORD);
2968
2969 /* Note that we will be storing an integral number of words.
2970 So we have to be careful to ensure that we allocate an
2971 integral number of words. We do this below in the
2972 assign_stack_local if space was not allocated in the argument
2973 list. If it was, this will not work if PARM_BOUNDARY is not
2974 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2975 if it becomes a problem. */
2976
2977 if (stack_parm == 0)
2978 {
2979 stack_parm
2980 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2981 /* If this is a memory ref that contains aggregate components,
2982 mark it as such for cse and loop optimize. */
2983 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2984 }
2985
2986 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2987 abort ();
2988
2989 move_block_from_reg (REGNO (entry_parm),
2990 validize_mem (stack_parm),
2991 size_stored / UNITS_PER_WORD);
2992 }
2993 DECL_RTL (parm) = stack_parm;
2994 }
2995 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
2996 && ! DECL_INLINE (fndecl))
2997 /* layout_decl may set this. */
2998 || TREE_ADDRESSABLE (parm)
2999 || TREE_SIDE_EFFECTS (parm)
3000 /* If -ffloat-store specified, don't put explicit
3001 float variables into registers. */
3002 || (flag_float_store
3003 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3004 /* Always assign pseudo to structure return or item passed
3005 by invisible reference. */
3006 || passed_pointer || parm == function_result_decl)
3007 {
3008 /* Store the parm in a pseudoregister during the function, but we
3009 may need to do it in a wider mode. */
3010
3011 register rtx parmreg;
3012
3013 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3014 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
3015 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
3016 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
3017 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
3018 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
3019 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
3020 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
3021 {
3022 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
3023 }
3024
3025 parmreg = gen_reg_rtx (nominal_mode);
3026 REG_USERVAR_P (parmreg) = 1;
3027
3028 /* If this was an item that we received a pointer to, set DECL_RTL
3029 appropriately. */
3030 if (passed_pointer)
3031 {
3032 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3033 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3034 }
3035 else
3036 DECL_RTL (parm) = parmreg;
3037
3038 /* Copy the value into the register. */
3039 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
3040 {
3041 /* If ENTRY_PARM is a hard register, it might be in a register
3042 not valid for operating in its mode (e.g., an odd-numbered
3043 register for a DFmode). In that case, moves are the only
3044 thing valid, so we can't do a convert from there. This
3045 occurs when the calling sequence allows such misaligned
3046 usages. */
3047 if (GET_CODE (entry_parm) == REG
3048 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
3049 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
3050 GET_MODE (entry_parm)))
3051 convert_move (parmreg, copy_to_reg (entry_parm), unsignedp);
3052 else
3053 convert_move (parmreg, validize_mem (entry_parm), unsignedp);
3054 }
3055 else
3056 emit_move_insn (parmreg, validize_mem (entry_parm));
3057
3058 /* If we were passed a pointer but the actual value
3059 can safely live in a register, put it in one. */
3060 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3061 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3062 && ! DECL_INLINE (fndecl))
3063 /* layout_decl may set this. */
3064 || TREE_ADDRESSABLE (parm)
3065 || TREE_SIDE_EFFECTS (parm)
3066 /* If -ffloat-store specified, don't put explicit
3067 float variables into registers. */
3068 || (flag_float_store
3069 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3070 {
3071 /* We can't use nominal_mode, because it will have been set to
3072 Pmode above. We must use the actual mode of the parm. */
3073 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3074 emit_move_insn (parmreg, DECL_RTL (parm));
3075 DECL_RTL (parm) = parmreg;
3076 }
3077
3078 /* In any case, record the parm's desired stack location
3079 in case we later discover it must live in the stack. */
3080 if (REGNO (parmreg) >= nparmregs)
3081 {
3082 rtx *new;
3083 nparmregs = REGNO (parmreg) + 5;
3084 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3085 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
3086 parm_reg_stack_loc = new;
3087 }
3088 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3089
3090 /* Mark the register as eliminable if we did no conversion
3091 and it was copied from memory at a fixed offset,
3092 and the arg pointer was not copied to a pseudo-reg.
3093 If the arg pointer is a pseudo reg or the offset formed
3094 an invalid address, such memory-equivalences
3095 as we make here would screw up life analysis for it. */
3096 if (nominal_mode == passed_mode
3097 && GET_CODE (entry_parm) == MEM
3098 && entry_parm == stack_parm
3099 && stack_offset.var == 0
3100 && reg_mentioned_p (virtual_incoming_args_rtx,
3101 XEXP (entry_parm, 0)))
3102 REG_NOTES (get_last_insn ())
3103 = gen_rtx (EXPR_LIST, REG_EQUIV,
3104 entry_parm, REG_NOTES (get_last_insn ()));
3105
3106 /* For pointer data type, suggest pointer register. */
3107 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3108 mark_reg_pointer (parmreg);
3109 }
3110 else
3111 {
3112 /* Value must be stored in the stack slot STACK_PARM
3113 during function execution. */
3114
3115 if (passed_mode != nominal_mode)
3116 {
3117 /* Conversion is required. */
3118 if (GET_CODE (entry_parm) == REG
3119 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
3120 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
3121 entry_parm = copy_to_reg (entry_parm);
3122
3123 entry_parm = convert_to_mode (nominal_mode, entry_parm,
3124 TREE_UNSIGNED (TREE_TYPE (parm)));
3125 }
3126
3127 if (entry_parm != stack_parm)
3128 {
3129 if (stack_parm == 0)
3130 {
3131 stack_parm
3132 = assign_stack_local (GET_MODE (entry_parm),
3133 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3134 /* If this is a memory ref that contains aggregate components,
3135 mark it as such for cse and loop optimize. */
3136 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3137 }
3138
3139 emit_move_insn (validize_mem (stack_parm),
3140 validize_mem (entry_parm));
3141 }
3142
3143 DECL_RTL (parm) = stack_parm;
3144 }
3145
3146 /* If this "parameter" was the place where we are receiving the
3147 function's incoming structure pointer, set up the result. */
3148 if (parm == function_result_decl)
3149 DECL_RTL (DECL_RESULT (fndecl))
3150 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3151
3152 if (TREE_THIS_VOLATILE (parm))
3153 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3154 if (TREE_READONLY (parm))
3155 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3156 }
3157
3158 max_parm_reg = max_reg_num ();
3159 last_parm_insn = get_last_insn ();
3160
3161 current_function_args_size = stack_args_size.constant;
3162
3163 /* Adjust function incoming argument size for alignment and
3164 minimum length. */
3165
3166 #ifdef REG_PARM_STACK_SPACE
3167 #ifndef MAYBE_REG_PARM_STACK_SPACE
3168 current_function_args_size = MAX (current_function_args_size,
3169 REG_PARM_STACK_SPACE (fndecl));
3170 #endif
3171 #endif
3172
3173 #ifdef STACK_BOUNDARY
3174 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3175
3176 current_function_args_size
3177 = ((current_function_args_size + STACK_BYTES - 1)
3178 / STACK_BYTES) * STACK_BYTES;
3179 #endif
3180
3181 #ifdef ARGS_GROW_DOWNWARD
3182 current_function_arg_offset_rtx
3183 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3184 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3185 size_int (-stack_args_size.constant)),
3186 NULL_RTX, VOIDmode, 0));
3187 #else
3188 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3189 #endif
3190
3191 /* See how many bytes, if any, of its args a function should try to pop
3192 on return. */
3193
3194 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3195 current_function_args_size);
3196
3197 /* For a stdarg.h function, save info about regs and stack space
3198 used by the named args. */
3199
3200 if (stdarg)
3201 current_function_args_info = args_so_far;
3202
3203 /* Set the rtx used for the function return value. Put this in its
3204 own variable so any optimizers that need this information don't have
3205 to include tree.h. Do this here so it gets done when an inlined
3206 function gets output. */
3207
3208 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3209 }
3210 \f
3211 /* Compute the size and offset from the start of the stacked arguments for a
3212 parm passed in mode PASSED_MODE and with type TYPE.
3213
3214 INITIAL_OFFSET_PTR points to the current offset into the stacked
3215 arguments.
3216
3217 The starting offset and size for this parm are returned in *OFFSET_PTR
3218 and *ARG_SIZE_PTR, respectively.
3219
3220 IN_REGS is non-zero if the argument will be passed in registers. It will
3221 never be set if REG_PARM_STACK_SPACE is not defined.
3222
3223 FNDECL is the function in which the argument was defined.
3224
3225 There are two types of rounding that are done. The first, controlled by
3226 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3227 list to be aligned to the specific boundary (in bits). This rounding
3228 affects the initial and starting offsets, but not the argument size.
3229
3230 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3231 optionally rounds the size of the parm to PARM_BOUNDARY. The
3232 initial offset is not affected by this rounding, while the size always
3233 is and the starting offset may be. */
3234
3235 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
3236 initial_offset_ptr is positive because locate_and_pad_parm's
3237 callers pass in the total size of args so far as
3238 initial_offset_ptr. arg_size_ptr is always positive. */
3239
3240 static void pad_to_arg_alignment (), pad_below ();
3241
3242 void
3243 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3244 initial_offset_ptr, offset_ptr, arg_size_ptr)
3245 enum machine_mode passed_mode;
3246 tree type;
3247 int in_regs;
3248 tree fndecl;
3249 struct args_size *initial_offset_ptr;
3250 struct args_size *offset_ptr;
3251 struct args_size *arg_size_ptr;
3252 {
3253 tree sizetree
3254 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3255 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3256 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3257 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3258 int reg_parm_stack_space = 0;
3259
3260 #ifdef REG_PARM_STACK_SPACE
3261 /* If we have found a stack parm before we reach the end of the
3262 area reserved for registers, skip that area. */
3263 if (! in_regs)
3264 {
3265 #ifdef MAYBE_REG_PARM_STACK_SPACE
3266 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3267 #else
3268 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3269 #endif
3270 if (reg_parm_stack_space > 0)
3271 {
3272 if (initial_offset_ptr->var)
3273 {
3274 initial_offset_ptr->var
3275 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3276 size_int (reg_parm_stack_space));
3277 initial_offset_ptr->constant = 0;
3278 }
3279 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3280 initial_offset_ptr->constant = reg_parm_stack_space;
3281 }
3282 }
3283 #endif /* REG_PARM_STACK_SPACE */
3284
3285 arg_size_ptr->var = 0;
3286 arg_size_ptr->constant = 0;
3287
3288 #ifdef ARGS_GROW_DOWNWARD
3289 if (initial_offset_ptr->var)
3290 {
3291 offset_ptr->constant = 0;
3292 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3293 initial_offset_ptr->var);
3294 }
3295 else
3296 {
3297 offset_ptr->constant = - initial_offset_ptr->constant;
3298 offset_ptr->var = 0;
3299 }
3300 if (where_pad == upward
3301 && (TREE_CODE (sizetree) != INTEGER_CST
3302 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3303 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3304 SUB_PARM_SIZE (*offset_ptr, sizetree);
3305 pad_to_arg_alignment (offset_ptr, boundary);
3306 if (initial_offset_ptr->var)
3307 {
3308 arg_size_ptr->var = size_binop (MINUS_EXPR,
3309 size_binop (MINUS_EXPR,
3310 integer_zero_node,
3311 initial_offset_ptr->var),
3312 offset_ptr->var);
3313 }
3314 else
3315 {
3316 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3317 offset_ptr->constant);
3318 }
3319 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3320 if (where_pad == downward)
3321 pad_below (arg_size_ptr, passed_mode, sizetree);
3322 #else /* !ARGS_GROW_DOWNWARD */
3323 pad_to_arg_alignment (initial_offset_ptr, boundary);
3324 *offset_ptr = *initial_offset_ptr;
3325 if (where_pad == downward)
3326 pad_below (offset_ptr, passed_mode, sizetree);
3327
3328 #ifdef PUSH_ROUNDING
3329 if (passed_mode != BLKmode)
3330 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3331 #endif
3332
3333 if (where_pad != none
3334 && (TREE_CODE (sizetree) != INTEGER_CST
3335 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3336 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3337
3338 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3339 #endif /* ARGS_GROW_DOWNWARD */
3340 }
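/* Worked example for the upward-growing case (values assumed): with
 an initial offset of 6, FUNCTION_ARG_BOUNDARY of 32 bits, and a
 5-byte BLKmode argument padded upward under PARM_BOUNDARY == 32:

 pad_to_arg_alignment: offset 6 -> CEIL_ROUND (6, 4) = 8
 round_up of the size: 5 -> 8

 so *offset_ptr is 8, *arg_size_ptr is 8, and the next argument
 starts at byte 16. */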
3341
3342 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3343 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3344
3345 static void
3346 pad_to_arg_alignment (offset_ptr, boundary)
3347 struct args_size *offset_ptr;
3348 int boundary;
3349 {
3350 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3351
3352 if (boundary > BITS_PER_UNIT)
3353 {
3354 if (offset_ptr->var)
3355 {
3356 offset_ptr->var =
3357 #ifdef ARGS_GROW_DOWNWARD
3358 round_down
3359 #else
3360 round_up
3361 #endif
3362 (ARGS_SIZE_TREE (*offset_ptr),
3363 boundary_in_bytes);
3364 offset_ptr->constant = 0; /*?*/
3365 }
3366 else
3367 offset_ptr->constant =
3368 #ifdef ARGS_GROW_DOWNWARD
3369 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3370 #else
3371 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3372 #endif
3373 }
3374 }
3375
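/* Worked example (alignment values assumed for illustration): with
   BITS_PER_UNIT == 8 and BOUNDARY == 32, boundary_in_bytes is 4, so a
   constant offset of 13 becomes CEIL_ROUND (13, 4) == 16 when args
   grow upward, or FLOOR_ROUND (-13, 4) == -16 when they grow downward.
   Both macros mask rather than divide, so negative offsets are safe.  */
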
3376 static void
3377 pad_below (offset_ptr, passed_mode, sizetree)
3378 struct args_size *offset_ptr;
3379 enum machine_mode passed_mode;
3380 tree sizetree;
3381 {
3382 if (passed_mode != BLKmode)
3383 {
3384 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3385 offset_ptr->constant
3386 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3387 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3388 - GET_MODE_SIZE (passed_mode));
3389 }
3390 else
3391 {
3392 if (TREE_CODE (sizetree) != INTEGER_CST
3393 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3394 {
3395 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3396 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3397 /* Add it in. */
3398 ADD_PARM_SIZE (*offset_ptr, s2);
3399 SUB_PARM_SIZE (*offset_ptr, sizetree);
3400 }
3401 }
3402 }
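
/* E.g. (PARM_BOUNDARY == 32 assumed for illustration) a BLKmode parm
   of 10 bytes gives s2 == round_up (10, 4) == 12, so 2 bytes of
   padding are added below the argument.  */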
3403
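/* Round the tree VALUE down to a multiple of DIVISOR.  */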
3404 static tree
3405 round_down (value, divisor)
3406 tree value;
3407 int divisor;
3408 {
3409 return size_binop (MULT_EXPR,
3410 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3411 size_int (divisor));
3412 }
3413 \f
3414 /* Walk the tree of blocks describing the binding levels within a function
3415 and warn about uninitialized variables.
3416 This is done after calling flow_analysis and before global_alloc
3417 replaces the pseudo-regs with hard regs. */
3418
3419 void
3420 uninitialized_vars_warning (block)
3421 tree block;
3422 {
3423 register tree decl, sub;
3424 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3425 {
3426 if (TREE_CODE (decl) == VAR_DECL
3427 /* These warnings are unreliable for aggregates
3428 because assigning the fields one by one can fail to convince
3429 flow.c that the entire aggregate was initialized.
3430 Unions are troublesome because members may be shorter. */
3431 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3432 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3433 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3434 && DECL_RTL (decl) != 0
3435 && GET_CODE (DECL_RTL (decl)) == REG
3436 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3437 warning_with_decl (decl,
3438 "`%s' may be used uninitialized in this function");
3439 if (TREE_CODE (decl) == VAR_DECL
3440 && DECL_RTL (decl) != 0
3441 && GET_CODE (DECL_RTL (decl)) == REG
3442 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3443 warning_with_decl (decl,
3444 "variable `%s' may be clobbered by `longjmp'");
3445 }
3446 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3447 uninitialized_vars_warning (sub);
3448 }
3449
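/* Sketch of the intended call (hypothetical; the actual caller is the
   compilation driver):

       if (warn_uninitialized)
         uninitialized_vars_warning (DECL_INITIAL (current_function_decl));

   run after flow analysis has computed the register lifetimes that
   regno_uninitialized consults.  */
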
3450 /* Do the `clobbered by longjmp' check of uninitialized_vars_warning,
3451 but for arguments instead of local variables. */
3452
3453 void
3454 setjmp_args_warning (block)
3455 tree block;
3456 {
3457 register tree decl;
3458 for (decl = DECL_ARGUMENTS (current_function_decl);
3459 decl; decl = TREE_CHAIN (decl))
3460 if (DECL_RTL (decl) != 0
3461 && GET_CODE (DECL_RTL (decl)) == REG
3462 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3463 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3464 }
3465
3466 /* If this function calls setjmp, put all vars into the stack
3467 unless they were declared `register'. */
3468
3469 void
3470 setjmp_protect (block)
3471 tree block;
3472 {
3473 register tree decl, sub;
3474 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3475 if ((TREE_CODE (decl) == VAR_DECL
3476 || TREE_CODE (decl) == PARM_DECL)
3477 && DECL_RTL (decl) != 0
3478 && GET_CODE (DECL_RTL (decl)) == REG
3479 /* If this variable came from an inline function, it must be
3480 that its life doesn't overlap the setjmp. If there was a
3481 setjmp in the function, it would already be in memory. We
3482 must exclude such variables because their DECL_RTL might be
3483 set to strange things such as virtual_stack_vars_rtx. */
3484 && ! DECL_FROM_INLINE (decl)
3485 && (
3486 #ifdef NON_SAVING_SETJMP
3487 /* If longjmp doesn't restore the registers,
3488 don't put anything in them. */
3489 NON_SAVING_SETJMP
3490 ||
3491 #endif
3492 ! DECL_REGISTER (decl)))
3493 put_var_into_stack (decl);
3494 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3495 setjmp_protect (sub);
3496 }
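
/* The hazard in user-level terms (illustrative C, not compiler code):

       jmp_buf buf;
       int f () { int x = 1; if (setjmp (buf)) return x; x = 2; g (); }

   If `x' stayed in a register that longjmp fails to restore, the
   return after the longjmp could see garbage; forcing `x' into the
   stack avoids that.  */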
3497 \f
3498 /* Like the previous function, but for args instead of local variables. */
3499
3500 void
3501 setjmp_protect_args ()
3502 {
3503 register tree decl, sub;
3504 for (decl = DECL_ARGUMENTS (current_function_decl);
3505 decl; decl = TREE_CHAIN (decl))
3506 if ((TREE_CODE (decl) == VAR_DECL
3507 || TREE_CODE (decl) == PARM_DECL)
3508 && DECL_RTL (decl) != 0
3509 && GET_CODE (DECL_RTL (decl)) == REG
3510 && (
3511 /* If longjmp doesn't restore the registers,
3512 don't put anything in them. */
3513 #ifdef NON_SAVING_SETJMP
3514 NON_SAVING_SETJMP
3515 ||
3516 #endif
3517 ! DECL_REGISTER (decl)))
3518 put_var_into_stack (decl);
3519 }
3520 \f
3521 /* Return the context-pointer register corresponding to DECL,
3522 or 0 if it does not need one. */
3523
3524 rtx
3525 lookup_static_chain (decl)
3526 tree decl;
3527 {
3528 tree context = decl_function_context (decl);
3529 tree link;
3530
3531 if (context == 0)
3532 return 0;
3533
3534 /* We treat inline_function_decl as an alias for the current function
3535 because that is the inline function whose vars, types, etc.
3536 are being merged into the current function.
3537 See expand_inline_function. */
3538 if (context == current_function_decl || context == inline_function_decl)
3539 return virtual_stack_vars_rtx;
3540
3541 for (link = context_display; link; link = TREE_CHAIN (link))
3542 if (TREE_PURPOSE (link) == context)
3543 return RTL_EXPR_RTL (TREE_VALUE (link));
3544
3545 abort ();
3546 }
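
/* Illustration: given nested functions

       f () { int i; int g () { return i; } ... }

   expanding `g' needs the frame address of `f' as its static chain;
   this function returns the rtx holding it when passed `i' or `g'.  */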
3547 \f
3548 /* Convert a stack slot address ADDR for variable VAR
3549 (from a containing function)
3550 into an address valid in this function (using a static chain). */
3551
3552 rtx
3553 fix_lexical_addr (addr, var)
3554 rtx addr;
3555 tree var;
3556 {
3557 rtx basereg;
3558 int displacement;
3559 tree context = decl_function_context (var);
3560 struct function *fp;
3561 rtx base = 0;
3562
3563 /* If this is the present function, we need not do anything. */
3564 if (context == current_function_decl || context == inline_function_decl)
3565 return addr;
3566
3567 for (fp = outer_function_chain; fp; fp = fp->next)
3568 if (fp->decl == context)
3569 break;
3570
3571 if (fp == 0)
3572 abort ();
3573
3574 /* Decode given address as base reg plus displacement. */
3575 if (GET_CODE (addr) == REG)
3576 basereg = addr, displacement = 0;
3577 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3578 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3579 else
3580 abort ();
3581
3582 /* We accept vars reached via the containing function's
3583 incoming arg pointer and via its stack variables pointer. */
3584 if (basereg == fp->internal_arg_pointer)
3585 {
3586 /* If reached via arg pointer, get the arg pointer value
3587 out of that function's stack frame.
3588
3589 There are two cases: If a separate ap is needed, allocate a
3590 slot in the outer function for it and dereference it that way.
3591 This is correct even if the real ap is actually a pseudo.
3592 Otherwise, just adjust the offset from the frame pointer to
3593 compensate. */
3594
3595 #ifdef NEED_SEPARATE_AP
3596 rtx addr;
3597
3598 if (fp->arg_pointer_save_area == 0)
3599 fp->arg_pointer_save_area
3600 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3601
3602 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3603 addr = memory_address (Pmode, addr);
3604
3605 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3606 #else
3607 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3608 base = lookup_static_chain (var);
3609 #endif
3610 }
3611
3612 else if (basereg == virtual_stack_vars_rtx)
3613 {
3614 /* This is the same code as lookup_static_chain, duplicated here to
3615 avoid an extra call to decl_function_context. */
3616 tree link;
3617
3618 for (link = context_display; link; link = TREE_CHAIN (link))
3619 if (TREE_PURPOSE (link) == context)
3620 {
3621 base = RTL_EXPR_RTL (TREE_VALUE (link));
3622 break;
3623 }
3624 }
3625
3626 if (base == 0)
3627 abort ();
3628
3629 /* Use same offset, relative to appropriate static chain or argument
3630 pointer. */
3631 return plus_constant (base, displacement);
3632 }
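
/* For example (illustrative rtl), if ADDR in the containing function is

       (plus:SI (reg virtual-stack-vars) (const_int -12))

   the result is the containing function's frame address, fetched via
   the static chain (or via a saved arg pointer), plus the same -12.  */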
3633 \f
3634 /* Return the address of the trampoline for entering nested fn FUNCTION.
3635 If necessary, allocate a trampoline (in the stack frame)
3636 and emit rtl to initialize its contents (at entry to this function). */
3637
3638 rtx
3639 trampoline_address (function)
3640 tree function;
3641 {
3642 tree link;
3643 tree rtlexp;
3644 rtx tramp;
3645 struct function *fp;
3646 tree fn_context;
3647
3648 /* Find an existing trampoline and return it. */
3649 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3650 if (TREE_PURPOSE (link) == function)
3651 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3652 for (fp = outer_function_chain; fp; fp = fp->next)
3653 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3654 if (TREE_PURPOSE (link) == function)
3655 {
3656 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3657 function);
3658 return round_trampoline_addr (tramp);
3659 }
3660
3661 /* None exists; we must make one. */
3662
3663 /* Find the `struct function' for the function containing FUNCTION. */
3664 fp = 0;
3665 fn_context = decl_function_context (function);
3666 if (fn_context != current_function_decl)
3667 for (fp = outer_function_chain; fp; fp = fp->next)
3668 if (fp->decl == fn_context)
3669 break;
3670
3671 /* Allocate run-time space for this trampoline
3672 (usually in the defining function's stack frame). */
3673 #ifdef ALLOCATE_TRAMPOLINE
3674 tramp = ALLOCATE_TRAMPOLINE (fp);
3675 #else
3676 /* If rounding needed, allocate extra space
3677 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3678 #ifdef TRAMPOLINE_ALIGNMENT
3679 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3680 #else
3681 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3682 #endif
3683 if (fp != 0)
3684 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3685 else
3686 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3687 #endif
3688
3689 /* Record the trampoline for reuse and note it for later initialization
3690 by expand_function_end. */
3691 if (fp != 0)
3692 {
3693 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3694 rtlexp = make_node (RTL_EXPR);
3695 RTL_EXPR_RTL (rtlexp) = tramp;
3696 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3697 pop_obstacks ();
3698 }
3699 else
3700 {
3701 /* Make the RTL_EXPR node temporary, not momentary, so that the
3702 trampoline_list doesn't become garbage. */
3703 int momentary = suspend_momentary ();
3704 rtlexp = make_node (RTL_EXPR);
3705 resume_momentary (momentary);
3706
3707 RTL_EXPR_RTL (rtlexp) = tramp;
3708 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3709 }
3710
3711 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3712 return round_trampoline_addr (tramp);
3713 }
3714
3715 /* Given a trampoline address,
3716 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
3717
3718 static rtx
3719 round_trampoline_addr (tramp)
3720 rtx tramp;
3721 {
3722 #ifdef TRAMPOLINE_ALIGNMENT
3723 /* Round address up to desired boundary. */
3724 rtx temp = gen_reg_rtx (Pmode);
3725 temp = expand_binop (Pmode, add_optab, tramp,
3726 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3727 temp, 0, OPTAB_LIB_WIDEN);
3728 tramp = expand_binop (Pmode, and_optab, temp,
3729 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3730 temp, 0, OPTAB_LIB_WIDEN);
3731 #endif
3732 return tramp;
3733 }
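
/* The computation is the usual (addr + align - 1) & -align; e.g. with
   an alignment of 8 (assumed for illustration), address 0x1003 rounds
   up to 0x1008.  */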
3734 \f
3735 /* The functions identify_blocks and reorder_blocks provide a way to
3736 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3737 duplicate portions of the RTL code. Call identify_blocks before
3738 changing the RTL, and call reorder_blocks after. */
3739
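/* A sketch of the intended sequence (hypothetical caller):

       tree *block_vector = identify_blocks (top_block, get_insns ());
       ... reorder or duplicate insns, copying block-boundary notes ...
       top_block = reorder_blocks (block_vector, top_block, get_insns ());
       free (block_vector);

   The vector comes from xmalloc, so the caller is free to free it.  */
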
3740 static int all_blocks ();
3741 static tree blocks_nreverse ();
3742
3743 /* Put all this function's BLOCK nodes into a vector, and return it.
3744 Also store in each NOTE for the beginning or end of a block
3745 the index of that block in the vector.
3746 The arguments are TOP_BLOCK, the top-level block of the function,
3747 and INSNS, the insn chain of the function. */
3748
3749 tree *
3750 identify_blocks (top_block, insns)
3751 tree top_block;
3752 rtx insns;
3753 {
3754 int n_blocks;
3755 tree *block_vector;
3756 int *block_stack;
3757 int depth = 0;
3758 int next_block_number = 0;
3759 int current_block_number = 0;
3760 rtx insn;
3761
3762 if (top_block == 0)
3763 return 0;
3764
3765 n_blocks = all_blocks (top_block, 0);
3766 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3767 block_stack = (int *) alloca (n_blocks * sizeof (int));
3768
3769 all_blocks (top_block, block_vector);
3770
3771 for (insn = insns; insn; insn = NEXT_INSN (insn))
3772 if (GET_CODE (insn) == NOTE)
3773 {
3774 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3775 {
3776 block_stack[depth++] = current_block_number;
3777 current_block_number = next_block_number;
3778 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
3779 }
3780 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3781 {
3782 current_block_number = block_stack[--depth];
3783 NOTE_BLOCK_NUMBER (insn) = current_block_number;
3784 }
3785 }
3786
3787 return block_vector;
3788 }
3789
3790 /* Given BLOCK_VECTOR which was returned by identify_blocks,
3791 and a revised instruction chain, rebuild the tree structure
3792 of BLOCK nodes to correspond to the new order of RTL.
3793 The new block tree is inserted below TOP_BLOCK.
3794 Returns the current top-level block. */
3795
3796 tree
3797 reorder_blocks (block_vector, top_block, insns)
3798 tree *block_vector;
3799 tree top_block;
3800 rtx insns;
3801 {
3802 tree current_block = top_block;
3803 rtx insn;
3804
3805 if (block_vector == 0)
3806 return top_block;
3807
3808 /* Prune the old tree away, so that it doesn't get in the way. */
3809 BLOCK_SUBBLOCKS (current_block) = 0;
3810
3811 for (insn = insns; insn; insn = NEXT_INSN (insn))
3812 if (GET_CODE (insn) == NOTE)
3813 {
3814 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3815 {
3816 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
3817 /* If we have seen this block before, copy it. */
3818 if (TREE_ASM_WRITTEN (block))
3819 block = copy_node (block);
3820 BLOCK_SUBBLOCKS (block) = 0;
3821 TREE_ASM_WRITTEN (block) = 1;
3822 BLOCK_SUPERCONTEXT (block) = current_block;
3823 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3824 BLOCK_SUBBLOCKS (current_block) = block;
3825 current_block = block;
3826 NOTE_SOURCE_FILE (insn) = 0;
3827 }
3828 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3829 {
3830 BLOCK_SUBBLOCKS (current_block)
3831 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3832 current_block = BLOCK_SUPERCONTEXT (current_block);
3833 NOTE_SOURCE_FILE (insn) = 0;
3834 }
3835 }
3836
3837 return current_block;
3838 }
3839
3840 /* Reverse the order of elements in the chain T of blocks,
3841 and return the new head of the chain (old last element). */
3842
3843 static tree
3844 blocks_nreverse (t)
3845 tree t;
3846 {
3847 register tree prev = 0, decl, next;
3848 for (decl = t; decl; decl = next)
3849 {
3850 next = BLOCK_CHAIN (decl);
3851 BLOCK_CHAIN (decl) = prev;
3852 prev = decl;
3853 }
3854 return prev;
3855 }
3856
3857 /* Count BLOCK and all its subblocks, listing them in the vector VECTOR
3858 if VECTOR is nonzero. Also clear TREE_ASM_WRITTEN in all blocks. */
3859
3860 static int
3861 all_blocks (block, vector)
3862 tree block;
3863 tree *vector;
3864 {
3865 int n_blocks = 1;
3866 tree subblocks;
3867
3868 TREE_ASM_WRITTEN (block) = 0;
3869 /* Record this block. */
3870 if (vector)
3871 vector[0] = block;
3872
3873 /* Record the subblocks, and their subblocks. */
3874 for (subblocks = BLOCK_SUBBLOCKS (block);
3875 subblocks; subblocks = BLOCK_CHAIN (subblocks))
3876 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
3877
3878 return n_blocks;
3879 }
3880 \f
3881 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3882 and initialize static variables for generating RTL for the statements
3883 of the function. */
3884
3885 void
3886 init_function_start (subr, filename, line)
3887 tree subr;
3888 char *filename;
3889 int line;
3890 {
3891 char *junk;
3892
3893 init_stmt_for_function ();
3894
3895 cse_not_expected = ! optimize;
3896
3897 /* Caller save not needed yet. */
3898 caller_save_needed = 0;
3899
3900 /* No stack slots have been made yet. */
3901 stack_slot_list = 0;
3902
3903 /* There is no stack slot for handling nonlocal gotos. */
3904 nonlocal_goto_handler_slot = 0;
3905 nonlocal_goto_stack_level = 0;
3906
3907 /* No labels have been declared for nonlocal use. */
3908 nonlocal_labels = 0;
3909
3910 /* No function calls so far in this function. */
3911 function_call_count = 0;
3912
3913 /* No parm regs have been allocated.
3914 (This is important for output_inline_function.) */
3915 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3916
3917 /* Initialize the RTL mechanism. */
3918 init_emit ();
3919
3920 /* Initialize the queue of pending postincrement and postdecrements,
3921 and some other info in expr.c. */
3922 init_expr ();
3923
3924 /* We haven't done register allocation yet. */
3925 reg_renumber = 0;
3926
3927 init_const_rtx_hash_table ();
3928
3929 current_function_name = (*decl_printable_name) (subr, &junk);
3930
3931 /* Nonzero if this is a nested function that uses a static chain. */
3932
3933 current_function_needs_context
3934 = (decl_function_context (current_function_decl) != 0);
3935
3936 /* Set if a call to setjmp is seen. */
3937 current_function_calls_setjmp = 0;
3938
3939 /* Set if a call to longjmp is seen. */
3940 current_function_calls_longjmp = 0;
3941
3942 current_function_calls_alloca = 0;
3943 current_function_has_nonlocal_label = 0;
3944 current_function_contains_functions = 0;
3945
3946 current_function_returns_pcc_struct = 0;
3947 current_function_returns_struct = 0;
3948 current_function_epilogue_delay_list = 0;
3949 current_function_uses_const_pool = 0;
3950 current_function_uses_pic_offset_table = 0;
3951
3952 /* We have not yet needed to make a label to jump to for tail-recursion. */
3953 tail_recursion_label = 0;
3954
3955 /* We haven't had a need to make a save area for ap yet. */
3956
3957 arg_pointer_save_area = 0;
3958
3959 /* No stack slots allocated yet. */
3960 frame_offset = 0;
3961
3962 /* No SAVE_EXPRs in this function yet. */
3963 save_expr_regs = 0;
3964
3965 /* No RTL_EXPRs in this function yet. */
3966 rtl_expr_chain = 0;
3967
3968 /* We have not allocated any temporaries yet. */
3969 temp_slots = 0;
3970 temp_slot_level = 0;
3971
3972 /* Within function body, compute a type's size as soon as it is laid out. */
3973 immediate_size_expand++;
3974
3975 init_pending_stack_adjust ();
3976 inhibit_defer_pop = 0;
3977
3978 current_function_outgoing_args_size = 0;
3979
3980 /* Initialize the insn lengths. */
3981 init_insn_lengths ();
3982
3983 /* Prevent ever trying to delete the first instruction of a function.
3984 Also tell final how to output a linenum before the function prologue. */
3985 emit_line_note (filename, line);
3986
3987 /* Make sure first insn is a note even if we don't want linenums.
3988 This makes sure the first insn will never be deleted.
3989 Also, final expects a note to appear there. */
3990 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3991
3992 /* Set flags used by final.c. */
3993 if (aggregate_value_p (DECL_RESULT (subr)))
3994 {
3995 #ifdef PCC_STATIC_STRUCT_RETURN
3996 if (flag_pcc_struct_return)
3997 current_function_returns_pcc_struct = 1;
3998 else
3999 #endif
4000 current_function_returns_struct = 1;
4001 }
4002
4003 /* Warn if the return value is an aggregate type,
4004 regardless of which calling convention we are using for it. */
4005 if (warn_aggregate_return
4006 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
4007 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
4008 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
4009 warning ("function returns an aggregate");
4010
4011 current_function_returns_pointer
4012 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
4013
4014 /* Indicate that we need to distinguish between the return value of the
4015 present function and the return value of a function being called. */
4016 rtx_equal_function_value_matters = 1;
4017
4018 /* Indicate that we have not instantiated virtual registers yet. */
4019 virtuals_instantiated = 0;
4020
4021 /* Indicate we have no need of a frame pointer yet. */
4022 frame_pointer_needed = 0;
4023
4024 /* By default assume not varargs. */
4025 current_function_varargs = 0;
4026 }
4027
4028 /* Indicate that the current function uses extra args
4029 not explicitly mentioned in the argument list in any fashion. */
4030
4031 void
4032 mark_varargs ()
4033 {
4034 current_function_varargs = 1;
4035 }
4036
4037 /* Expand a call to __main at the beginning of a possible main function. */
4038
4039 void
4040 expand_main_function ()
4041 {
4042 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
4043 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
4044 VOIDmode, 0);
4045 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
4046 }
4047 \f
4048 /* Start the RTL for a new function, and set variables used for
4049 emitting RTL.
4050 SUBR is the FUNCTION_DECL node.
4051 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4052 the function's parameters, which must be run at any return statement. */
4053
4054 void
4055 expand_function_start (subr, parms_have_cleanups)
4056 tree subr;
4057 int parms_have_cleanups;
4058 {
4059 register int i;
4060 tree tem;
4061 rtx last_ptr;
4062
4063 /* Make sure volatile mem refs aren't considered
4064 valid operands of arithmetic insns. */
4065 init_recog_no_volatile ();
4066
4067 /* If function gets a static chain arg, store it in the stack frame.
4068 Do this first, so it gets the first stack slot offset. */
4069 if (current_function_needs_context)
4070 {
4071 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4072 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4073 }
4074
4075 /* If the parameters of this function need cleaning up, get a label
4076 for the beginning of the code which executes those cleanups. This must
4077 be done before doing anything with return_label. */
4078 if (parms_have_cleanups)
4079 cleanup_label = gen_label_rtx ();
4080 else
4081 cleanup_label = 0;
4082
4083 /* Make the label for return statements to jump to, if this machine
4084 does not have a one-instruction return and uses an epilogue,
4085 or if it returns a structure, or if it has parm cleanups. */
4086 #ifdef HAVE_return
4087 if (cleanup_label == 0 && HAVE_return
4088 && ! current_function_returns_pcc_struct
4089 && ! (current_function_returns_struct && ! optimize))
4090 return_label = 0;
4091 else
4092 return_label = gen_label_rtx ();
4093 #else
4094 return_label = gen_label_rtx ();
4095 #endif
4096
4097 /* Initialize rtx used to return the value. */
4098 /* Do this before assign_parms so that we copy the struct value address
4099 before any library calls that assign parms might generate. */
4100
4101 /* Decide whether to return the value in memory or in a register. */
4102 if (aggregate_value_p (DECL_RESULT (subr)))
4103 {
4104 /* Returning something that won't go in a register. */
4105 register rtx value_address = 0;
4106
4107 #ifdef PCC_STATIC_STRUCT_RETURN
4108 if (current_function_returns_pcc_struct)
4109 {
4110 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4111 value_address = assemble_static_space (size);
4112 }
4113 else
4114 #endif
4115 {
4116 /* Expect to be passed the address of a place to store the value.
4117 If it is passed as an argument, assign_parms will take care of
4118 it. */
4119 if (struct_value_incoming_rtx)
4120 {
4121 value_address = gen_reg_rtx (Pmode);
4122 emit_move_insn (value_address, struct_value_incoming_rtx);
4123 }
4124 }
4125 if (value_address)
4126 DECL_RTL (DECL_RESULT (subr))
4127 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4128 value_address);
4129 }
4130 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4131 /* If return mode is void, this decl rtl should not be used. */
4132 DECL_RTL (DECL_RESULT (subr)) = 0;
4133 else if (parms_have_cleanups)
4134 {
4135 /* If function will end with cleanup code for parms,
4136 compute the return values into a pseudo reg,
4137 which we will copy into the true return register
4138 after the cleanups are done. */
4139
4140 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
4141 #ifdef PROMOTE_FUNCTION_RETURN
4142 tree type = TREE_TYPE (DECL_RESULT (subr));
4143 int unsignedp = TREE_UNSIGNED (type);
4144
4145 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
4146 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
4147 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
4148 || TREE_CODE (type) == OFFSET_TYPE)
4149 {
4150 PROMOTE_MODE (mode, unsignedp, type);
4151 }
4152 #endif
4153
4154 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
4155 }
4156 else
4157 /* Scalar, returned in a register. */
4158 {
4159 #ifdef FUNCTION_OUTGOING_VALUE
4160 DECL_RTL (DECL_RESULT (subr))
4161 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4162 #else
4163 DECL_RTL (DECL_RESULT (subr))
4164 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4165 #endif
4166
4167 /* Mark this reg as the function's return value. */
4168 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4169 {
4170 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4171 /* Needed because we may need to move this to memory
4172 in case it's a named return value whose address is taken. */
4173 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4174 }
4175 }
4176
4177 /* Initialize rtx for parameters and local variables.
4178 In some cases this requires emitting insns. */
4179
4180 assign_parms (subr, 0);
4181
4182 /* The following was moved from init_function_start.
4183 The move is supposed to make sdb output more accurate. */
4184 /* Indicate the beginning of the function body,
4185 as opposed to parm setup. */
4186 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4187
4188 /* If doing stupid allocation, mark parms as born here. */
4189
4190 if (GET_CODE (get_last_insn ()) != NOTE)
4191 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4192 parm_birth_insn = get_last_insn ();
4193
4194 if (obey_regdecls)
4195 {
4196 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4197 use_variable (regno_reg_rtx[i]);
4198
4199 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4200 use_variable (current_function_internal_arg_pointer);
4201 }
4202
4203 /* Fetch static chain values for containing functions. */
4204 tem = decl_function_context (current_function_decl);
4205 /* If not doing stupid register allocation, then start off with the static
4206 chain pointer in a pseudo register. Otherwise, we use the stack
4207 address that was generated above. */
4208 if (tem && ! obey_regdecls)
4209 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4210 context_display = 0;
4211 while (tem)
4212 {
4213 tree rtlexp = make_node (RTL_EXPR);
4214
4215 RTL_EXPR_RTL (rtlexp) = last_ptr;
4216 context_display = tree_cons (tem, rtlexp, context_display);
4217 tem = decl_function_context (tem);
4218 if (tem == 0)
4219 break;
4220 /* Chain thru stack frames, assuming pointer to next lexical frame
4221 is found at the place we always store it. */
4222 #ifdef FRAME_GROWS_DOWNWARD
4223 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4224 #endif
4225 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4226 memory_address (Pmode, last_ptr)));
4227 }
4228
4229 /* After the display initializations is where the tail-recursion label
4230 should go, if we end up needing one. Ensure we have a NOTE here
4231 since some things (like trampolines) get placed before this. */
4232 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4233
4234 /* Evaluate now the sizes of any types declared among the arguments. */
4235 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4236 expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
4237
4238 /* Make sure there is a line number after the function entry setup code. */
4239 force_next_line_note ();
4240 }
4241 \f
4242 /* Generate RTL for the end of the current function.
4243 FILENAME and LINE are the current position in the source file. */
4244
4245 /* It is up to language-specific callers to do cleanups for parameters. */
4246
4247 void
4248 expand_function_end (filename, line)
4249 char *filename;
4250 int line;
4251 {
4252 register int i;
4253 tree link;
4254
4255 static rtx initial_trampoline;
4256
4257 #ifdef NON_SAVING_SETJMP
4258 /* Don't put any variables in registers if we call setjmp
4259 on a machine that fails to restore the registers. */
4260 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4261 {
4262 setjmp_protect (DECL_INITIAL (current_function_decl));
4263 setjmp_protect_args ();
4264 }
4265 #endif
4266
4267 /* Save the argument pointer if a save area was made for it. */
4268 if (arg_pointer_save_area)
4269 {
4270 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4271 emit_insn_before (x, tail_recursion_reentry);
4272 }
4273
4274 /* Initialize any trampolines required by this function. */
4275 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4276 {
4277 tree function = TREE_PURPOSE (link);
4278 rtx context = lookup_static_chain (function);
4279 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4280 rtx seq;
4281
4282 /* First make sure this compilation has a template for
4283 initializing trampolines. */
4284 if (initial_trampoline == 0)
4285 {
4286 end_temporary_allocation ();
4287 initial_trampoline
4288 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4289 resume_temporary_allocation ();
4290 }
4291
4292 /* Generate insns to initialize the trampoline. */
4293 start_sequence ();
4294 tramp = change_address (initial_trampoline, BLKmode,
4295 round_trampoline_addr (XEXP (tramp, 0)));
4296 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4297 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4298 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4299 XEXP (DECL_RTL (function), 0), context);
4300 seq = get_insns ();
4301 end_sequence ();
4302
4303 /* Put those insns at entry to the containing function (this one). */
4304 emit_insns_before (seq, tail_recursion_reentry);
4305 }
4306 /* Clear the trampoline_list for the next function. */
4307 trampoline_list = 0;
4308
4309 #if 0 /* I think unused parms are legitimate enough. */
4310 /* Warn about unused parms. */
4311 if (warn_unused)
4312 {
4313 rtx decl;
4314
4315 for (decl = DECL_ARGUMENTS (current_function_decl);
4316 decl; decl = TREE_CHAIN (decl))
4317 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL)
4318 warning_with_decl (decl, "unused parameter `%s'");
4319 }
4320 #endif
4321
4322 /* Delete handlers for nonlocal gotos if nothing uses them. */
4323 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4324 delete_handlers ();
4325
4326 /* End any sequences that failed to be closed due to syntax errors. */
4327 while (in_sequence_p ())
4328 end_sequence ();
4329
4330 /* Outside function body, can't compute type's actual size
4331 until next function's body starts. */
4332 immediate_size_expand--;
4333
4334 /* If doing stupid register allocation,
4335 mark register parms as dying here. */
4336
4337 if (obey_regdecls)
4338 {
4339 rtx tem;
4340 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4341 use_variable (regno_reg_rtx[i]);
4342
4343 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4344
4345 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4346 {
4347 use_variable (XEXP (tem, 0));
4348 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4349 }
4350
4351 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4352 use_variable (current_function_internal_arg_pointer);
4353 }
4354
4355 clear_pending_stack_adjust ();
4356 do_pending_stack_adjust ();
4357
4358 /* Mark the end of the function body.
4359 If control reaches this insn, the function can drop through
4360 without returning a value. */
4361 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4362
4363 /* Output a linenumber for the end of the function.
4364 SDB depends on this. */
4365 emit_line_note_force (filename, line);
4366
4367 /* Output the label for the actual return from the function,
4368 if one is expected. This happens either because a function epilogue
4369 is used instead of a return instruction, or because a return was done
4370 with a goto in order to run local cleanups, or because of pcc-style
4371 structure returning. */
4372
4373 if (return_label)
4374 emit_label (return_label);
4375
4376 /* If we had calls to alloca, and this machine needs
4377 an accurate stack pointer to exit the function,
4378 insert some code to save and restore the stack pointer. */
4379 #ifdef EXIT_IGNORE_STACK
4380 if (! EXIT_IGNORE_STACK)
4381 #endif
4382 if (current_function_calls_alloca)
4383 {
4384 rtx tem = 0;
4385
4386 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4387 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4388 }
4389
4390 /* If scalar return value was computed in a pseudo-reg,
4391 copy that to the hard return register. */
4392 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4393 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4394 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4395 >= FIRST_PSEUDO_REGISTER))
4396 {
4397 rtx real_decl_result;
4398
4399 #ifdef FUNCTION_OUTGOING_VALUE
4400 real_decl_result
4401 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4402 current_function_decl);
4403 #else
4404 real_decl_result
4405 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4406 current_function_decl);
4407 #endif
4408 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4409 emit_move_insn (real_decl_result,
4410 DECL_RTL (DECL_RESULT (current_function_decl)));
4411 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4412 }
4413
4414 /* If returning a structure, arrange to return the address of the value
4415 in a place where debuggers expect to find it.
4416
4417 If returning a structure PCC style,
4418 the caller also depends on this value.
4419 And current_function_returns_pcc_struct is not necessarily set. */
4420 if (current_function_returns_struct
4421 || current_function_returns_pcc_struct)
4422 {
4423 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4424 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4425 #ifdef FUNCTION_OUTGOING_VALUE
4426 rtx outgoing
4427 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4428 current_function_decl);
4429 #else
4430 rtx outgoing
4431 = FUNCTION_VALUE (build_pointer_type (type),
4432 current_function_decl);
4433 #endif
4434
4435 /* Mark this as a function return value so integrate will delete the
4436 assignment and USE below when inlining this function. */
4437 REG_FUNCTION_VALUE_P (outgoing) = 1;
4438
4439 emit_move_insn (outgoing, value_address);
4440 use_variable (outgoing);
4441 }
4442
4443 /* Output a return insn if we are using one.
4444 Otherwise, let the rtl chain end here, to drop through
4445 into the epilogue. */
4446
4447 #ifdef HAVE_return
4448 if (HAVE_return)
4449 {
4450 emit_jump_insn (gen_return ());
4451 emit_barrier ();
4452 }
4453 #endif
4454
4455 /* Fix up any gotos that jumped out to the outermost
4456 binding level of the function.
4457 Must follow emitting RETURN_LABEL. */
4458
4459 /* If you have any cleanups to do at this point,
4460 and they need to create temporary variables,
4461 then you will lose. */
4462 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4463 }
4464 \f
4465 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4466
4467 static int *prologue;
4468 static int *epilogue;
4469
4470 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
4471 or a single insn). The array is terminated by a zero entry. */
4472
4473 static int *
4474 record_insns (insns)
4475 rtx insns;
4476 {
4477 int *vec;
4478
4479 if (GET_CODE (insns) == SEQUENCE)
4480 {
4481 int len = XVECLEN (insns, 0);
4482 vec = (int *) oballoc ((len + 1) * sizeof (int));
4483 vec[len] = 0;
4484 while (--len >= 0)
4485 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4486 }
4487 else
4488 {
4489 vec = (int *) oballoc (2 * sizeof (int));
4490 vec[0] = INSN_UID (insns);
4491 vec[1] = 0;
4492 }
4493 return vec;
4494 }
4495
4496 /* Determine how many INSN_UIDs in VEC are part of INSN. */
4497
4498 static int
4499 contains (insn, vec)
4500 rtx insn;
4501 int *vec;
4502 {
4503 register int i, j;
4504
4505 if (GET_CODE (insn) == INSN
4506 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4507 {
4508 int count = 0;
4509 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4510 for (j = 0; vec[j]; j++)
4511 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4512 count++;
4513 return count;
4514 }
4515 else
4516 {
4517 for (j = 0; vec[j]; j++)
4518 if (INSN_UID (insn) == vec[j])
4519 return 1;
4520 }
4521 return 0;
4522 }
4523
4524 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4525 this into place with notes indicating where the prologue ends and where
4526 the epilogue begins. Update the basic block information when possible. */
4527
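/* The resulting insn stream, when both are emitted, looks roughly like
   (illustrative):

       NOTE_INSN_DELETED               <- first insn, never deleted
       (prologue insns)
       NOTE_INSN_PROLOGUE_END
       ... function body ...
       NOTE_INSN_EPILOGUE_BEG
       (epilogue insns, ending in the return jump)
       (USE insns, if any)
       BARRIER  */
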
4528 void
4529 thread_prologue_and_epilogue_insns (f)
4530 rtx f;
4531 {
4532 #ifdef HAVE_prologue
4533 if (HAVE_prologue)
4534 {
4535 rtx head, seq, insn;
4536
4537 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4538 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4539 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4540 seq = gen_prologue ();
4541 head = emit_insn_after (seq, f);
4542
4543 /* Include the new prologue insns in the first block. Ignore them
4544 if they form a basic block unto themselves. */
4545 if (basic_block_head && n_basic_blocks
4546 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4547 basic_block_head[0] = NEXT_INSN (f);
4548
4549 /* Retain a map of the prologue insns. */
4550 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4551 }
4552 else
4553 #endif
4554 prologue = 0;
4555
4556 #ifdef HAVE_epilogue
4557 if (HAVE_epilogue)
4558 {
4559 rtx insn = get_last_insn ();
4560 rtx prev = prev_nonnote_insn (insn);
4561
4562 /* If we end with a BARRIER, we don't need an epilogue. */
4563 if (! (prev && GET_CODE (prev) == BARRIER))
4564 {
4565 rtx tail, seq;
4566
4567 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4568 the epilogue insns (this must include the jump insn that
4569 returns), USE insns at the end of a function, and a BARRIER. */
4570
4571 emit_barrier_after (insn);
4572
4573 /* Place the epilogue before the USE insns at the end of a
4574 function. */
4575 while (prev
4576 && GET_CODE (prev) == INSN
4577 && GET_CODE (PATTERN (prev)) == USE)
4578 {
4579 insn = PREV_INSN (prev);
4580 prev = prev_nonnote_insn (prev);
4581 }
4582
4583 seq = gen_epilogue ();
4584 tail = emit_jump_insn_after (seq, insn);
4585 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4586
4587 /* Include the new epilogue insns in the last block. Ignore
4588 them if they form a basic block unto themselves. */
4589 if (basic_block_end && n_basic_blocks
4590 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4591 basic_block_end[n_basic_blocks - 1] = tail;
4592
4593 /* Retain a map of the epilogue insns. */
4594 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4595 return;
4596 }
4597 }
4598 #endif
4599 epilogue = 0;
4600 }
4601
4602 /* Reposition the prologue-end and epilogue-begin notes after instruction
4603 scheduling and delayed branch scheduling. */
4604
4605 void
4606 reposition_prologue_and_epilogue_notes (f)
4607 rtx f;
4608 {
4609 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
4610 /* Reposition the prologue and epilogue notes. */
4611 if (n_basic_blocks)
4612 {
4613 rtx next, prev;
4614 int len;
4615
4616 if (prologue)
4617 {
4618 register rtx insn, note = 0;
4619
4620 /* Scan from the beginning until we reach the last prologue insn.
4621 We apparently can't depend on basic_block_{head,end} after
4622 reorg has run. */
4623 for (len = 0; prologue[len]; len++)
4624 ;
4625 for (insn = f; insn; insn = NEXT_INSN (insn))
4626 if (GET_CODE (insn) == NOTE)
4627 {
4628 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4629 note = insn;
4630 }
4631 else if ((len -= contains (insn, prologue)) == 0)
4632 {
4633 /* Find the prologue-end note if we haven't already, and
4634 move it to just after the last prologue insn. */
4635 if (note == 0)
4636 for (note = insn; note = NEXT_INSN (note);)
4637 if (GET_CODE (note) == NOTE
4638 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4639 break;
4640 next = NEXT_INSN (note);
4641 prev = PREV_INSN (note);
4642 if (prev)
4643 NEXT_INSN (prev) = next;
4644 if (next)
4645 PREV_INSN (next) = prev;
4646 add_insn_after (note, insn);
4647 break;
4648 }
4649 }
4650
4651 if (epilogue)
4652 {
4653 register rtx insn, note = 0;
4654
4655 /* Scan from the end until we reach the first epilogue insn.
4656 We apparently can't depend on basic_block_{head,end} after
4657 reorg has run. */
4658 for (len = 0; epilogue[len]; len++)
4659 ;
4660 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
4661 if (GET_CODE (insn) == NOTE)
4662 {
4663 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4664 note = insn;
4665 }
4666 else if ((len -= contains (insn, epilogue)) == 0)
4667 {
4668 /* Find the epilogue-begin note if we haven't already, and
4669 move it to just before the first epilogue insn. */
4670 if (note == 0)
4671 for (note = insn; note = PREV_INSN (note);)
4672 if (GET_CODE (note) == NOTE
4673 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4674 break;
4675 next = NEXT_INSN (note);
4676 prev = PREV_INSN (note);
4677 if (prev)
4678 NEXT_INSN (prev) = next;
4679 if (next)
4680 PREV_INSN (next) = prev;
4681 add_insn_after (note, PREV_INSN (insn));
4682 break;
4683 }
4684 }
4685 }
4686 #endif /* HAVE_prologue or HAVE_epilogue */
4687 }