1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
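/* An illustrative sketch of the overall protocol, as a hypothetical
   front end might drive it (not code from this file; the argument
   lists are abbreviated):

	expand_function_start (fndecl, ...);	before the body is parsed
	... expand statements, calling assign_stack_local or
	    assign_stack_temp for locals and temporaries ...
	put_var_into_stack (decl);		if DECL's address appears late
	expand_function_end (...);		after the body is parsed

   The real calls are made from the language-specific files.  */
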
40 #include "config.h"
41
42 #include <stdio.h>
43
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57
58 /* Round a value down to the largest multiple of the required alignment
59    that does not exceed it.  Avoid using division in case the value is
60    negative.  Assume the alignment is a power of two.  */
61 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
62
63 /* Similar, but round the value up to the smallest multiple of the
64    alignment that is not less than it.  */
65 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
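
/* For example, with ALIGN == 8:

	FLOOR_ROUND (13, 8) == 8	FLOOR_ROUND (-9, 8) == -16
	CEIL_ROUND (13, 8) == 16	CEIL_ROUND (-9, 8) == -8

   Both yield exact multiples of ALIGN even when VALUE is negative,
   which a division-based rounding would not reliably do.  */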
66
67 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
68 during rtl generation. If they are different register numbers, this is
69 always true. It may also be true if
70 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
71 generation. See fix_lexical_addr for details. */
72
73 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
74 #define NEED_SEPARATE_AP
75 #endif
76
77 /* Number of bytes of args popped by function being compiled on its return.
78 Zero if no bytes are to be popped.
79 May affect compilation of return insn or of function epilogue. */
80
81 int current_function_pops_args;
82
83 /* Nonzero if function being compiled needs to be given an address
84 where the value should be stored. */
85
86 int current_function_returns_struct;
87
88 /* Nonzero if function being compiled needs to
89 return the address of where it has put a structure value. */
90
91 int current_function_returns_pcc_struct;
92
93 /* Nonzero if function being compiled needs to be passed a static chain. */
94
95 int current_function_needs_context;
96
97 /* Nonzero if function being compiled can call setjmp. */
98
99 int current_function_calls_setjmp;
100
101 /* Nonzero if function being compiled can call longjmp. */
102
103 int current_function_calls_longjmp;
104
105 /* Nonzero if function being compiled receives nonlocal gotos
106 from nested functions. */
107
108 int current_function_has_nonlocal_label;
109
110 /* Nonzero if function being compiled contains nested functions. */
111
112 int current_function_contains_functions;
113
114 /* Nonzero if function being compiled can call alloca,
115 either as a subroutine or builtin. */
116
117 int current_function_calls_alloca;
118
119 /* Nonzero if the current function returns a pointer type.  */
120
121 int current_function_returns_pointer;
122
123 /* If some insns can be deferred to the delay slots of the epilogue, the
124 delay list for them is recorded here. */
125
126 rtx current_function_epilogue_delay_list;
127
128 /* If function's args have a fixed size, this is that size, in bytes.
129 Otherwise, it is -1.
130 May affect compilation of return insn or of function epilogue. */
131
132 int current_function_args_size;
133
134 /* # bytes the prologue should push and pretend that the caller pushed them.
135 The prologue must do this, but only if parms can be passed in registers. */
136
137 int current_function_pretend_args_size;
138
139 /* # of bytes of outgoing arguments required to be pushed by the prologue.
140 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
141 and no stack adjusts will be done on function calls. */
142
143 int current_function_outgoing_args_size;
144
145 /* This is the offset from the arg pointer to the place where the first
146 anonymous arg can be found, if there is one. */
147
148 rtx current_function_arg_offset_rtx;
149
150 /* Nonzero if current function uses varargs.h or equivalent.
151 Zero for functions that use stdarg.h. */
152
153 int current_function_varargs;
154
155 /* Quantities of various kinds of registers
156 used for the current function's args. */
157
158 CUMULATIVE_ARGS current_function_args_info;
159
160 /* Name of function now being compiled. */
161
162 char *current_function_name;
163
164 /* If non-zero, an RTL expression for the location at which the current
165 function returns its result. Always equal to
166 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
167 independently of the tree structures. */
168
169 rtx current_function_return_rtx;
170
171 /* Nonzero if the current function uses the constant pool. */
172
173 int current_function_uses_const_pool;
174
175 /* Nonzero if the current function uses pic_offset_table_rtx. */
176 int current_function_uses_pic_offset_table;
177
178 /* The arg pointer hard register, or the pseudo into which it was copied. */
179 rtx current_function_internal_arg_pointer;
180
181 /* The FUNCTION_DECL for an inline function currently being expanded. */
182 tree inline_function_decl;
183
184 /* Number of function calls seen so far in current function. */
185
186 int function_call_count;
187
188 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
189 (labels to which there can be nonlocal gotos from nested functions)
190 in this function. */
191
192 tree nonlocal_labels;
193
194 /* RTX for stack slot that holds the current handler for nonlocal gotos.
195 Zero when function does not have nonlocal labels. */
196
197 rtx nonlocal_goto_handler_slot;
198
199 /* RTX for stack slot that holds the stack pointer value to restore
200 for a nonlocal goto.
201 Zero when function does not have nonlocal labels. */
202
203 rtx nonlocal_goto_stack_level;
204
205 /* Label that will go on parm cleanup code, if any.
206 Jumping to this label runs cleanup code for parameters, if
207 such code must be run. Following this code is the logical return label. */
208
209 rtx cleanup_label;
210
211 /* Label that will go on function epilogue.
212 Jumping to this label serves as a "return" instruction
213 on machines which require execution of the epilogue on all returns. */
214
215 rtx return_label;
216
217 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
218    So we can mark them all live at the end of the function, if not optimizing.  */
219 rtx save_expr_regs;
220
221 /* List (chain of EXPR_LISTs) of all stack slots in this function.
222 Made for the sake of unshare_all_rtl. */
223 rtx stack_slot_list;
224
225 /* Chain of all RTL_EXPRs that have insns in them. */
226 tree rtl_expr_chain;
227
228 /* Label to jump back to for tail recursion, or 0 if we have
229 not yet needed one for this function. */
230 rtx tail_recursion_label;
231
232 /* Place after which to insert the tail_recursion_label if we need one. */
233 rtx tail_recursion_reentry;
234
235 /* Location at which to save the argument pointer if it will need to be
236 referenced. There are two cases where this is done: if nonlocal gotos
237 exist, or if vars stored at an offset from the argument pointer will be
238 needed by inner routines. */
239
240 rtx arg_pointer_save_area;
241
242 /* Offset to end of allocated area of stack frame.
243 If stack grows down, this is the address of the last stack slot allocated.
244 If stack grows up, this is the address for the next slot. */
245 int frame_offset;
246
247 /* List (chain of TREE_LISTs) of static chains for containing functions.
248 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
249 in an RTL_EXPR in the TREE_VALUE. */
250 static tree context_display;
251
252 /* List (chain of TREE_LISTs) of trampolines for nested functions.
253 The trampoline sets up the static chain and jumps to the function.
254 We supply the trampoline's address when the function's address is requested.
255
256 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
257 in an RTL_EXPR in the TREE_VALUE. */
258 static tree trampoline_list;
259
260 /* Insn after which register parms and SAVE_EXPRs are born, if not optimizing.  */
261 static rtx parm_birth_insn;
262
263 #if 0
264 /* Nonzero if a stack slot has been generated whose address is not
265 actually valid. It means that the generated rtl must all be scanned
266 to detect and correct the invalid addresses where they occur. */
267 static int invalid_stack_slot;
268 #endif
269
270 /* Last insn of those whose job was to put parms into their nominal homes. */
271 static rtx last_parm_insn;
272
273 /* 1 + last pseudo register number used for loading a copy
274 of a parameter of this function. */
275 static int max_parm_reg;
276
277 /* Vector indexed by REGNO, containing location on stack in which
278 to put the parm which is nominally in pseudo register REGNO,
279 if we discover that that parm must go in the stack. */
280 static rtx *parm_reg_stack_loc;
281
282 #if 0 /* Turned off because 0 seems to work just as well. */
283 /* Cleanup lists are required for binding levels regardless of whether
284 that binding level has cleanups or not. This node serves as the
285 cleanup list whenever an empty list is required. */
286 static tree empty_cleanup_list;
287 #endif
288
289 /* Nonzero once virtual register instantiation has been done.
290 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
291 static int virtuals_instantiated;
292
293 /* Nonzero if we need to distinguish between the return value of this function
294 and the return value of a function called by this function. This helps
295    integrate.c.  */
296
297 extern int rtx_equal_function_value_matters;
298
299 void fixup_gotos ();
300
301 static tree round_down ();
302 static rtx round_trampoline_addr ();
303 static rtx fixup_stack_1 ();
304 static void fixup_var_refs ();
305 static void fixup_var_refs_insns ();
306 static void fixup_var_refs_1 ();
307 static void optimize_bit_field ();
308 static void instantiate_decls ();
309 static void instantiate_decls_1 ();
310 static void instantiate_decl ();
311 static int instantiate_virtual_regs_1 ();
312 static rtx fixup_memory_subreg ();
313 static rtx walk_fixup_memory_subreg ();
314 \f
315 /* In order to evaluate some expressions, such as function calls returning
316 structures in memory, we need to temporarily allocate stack locations.
317 We record each allocated temporary in the following structure.
318
319 Associated with each temporary slot is a nesting level. When we pop up
320 one level, all temporaries associated with the previous level are freed.
321 Normally, all temporaries are freed after the execution of the statement
322 in which they were created. However, if we are inside a ({...}) grouping,
323 the result may be in a temporary and hence must be preserved. If the
324 result could be in a temporary, we preserve it if we can determine which
325 one it is in. If we cannot determine which temporary may contain the
326 result, all temporaries are preserved. A temporary is preserved by
327 pretending it was allocated at the previous nesting level.
328
329 Automatic variables are also assigned temporary slots, at the nesting
330    level where they are defined.  They are marked as "kept" so that
331 free_temp_slots will not free them. */
332
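/* An illustrative lifecycle (a sketch only; SImode and the size 4
   are arbitrary choices):

	push_temp_slots ();
	temp = assign_stack_temp (SImode, 4, 0);
	... emit insns that use TEMP ...
	preserve_temp_slots (result);	needed only inside a ({...}) grouping
	free_temp_slots ();
	pop_temp_slots ();

   A slot released by free_temp_slots stays on the chain below and may
   be handed out again by a later call to assign_stack_temp.  */
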
333 struct temp_slot
334 {
335 /* Points to next temporary slot. */
336 struct temp_slot *next;
337 /* The rtx used to reference the slot.  */
338 rtx slot;
339 /* The size, in units, of the slot. */
340 int size;
341 /* Non-zero if this temporary is currently in use. */
342 char in_use;
343 /* Nesting level at which this slot is being used. */
344 int level;
345 /* Non-zero if this should survive a call to free_temp_slots. */
346 int keep;
347 };
348
349 /* List of all temporaries allocated, both available and in use. */
350
351 struct temp_slot *temp_slots;
352
353 /* Current nesting level for temporaries. */
354
355 int temp_slot_level;
356 \f
357 /* Pointer to chain of `struct function' for containing functions. */
358 struct function *outer_function_chain;
359
360 /* Given a function decl for a containing function,
361 return the `struct function' for it. */
362
363 struct function *
364 find_function_data (decl)
365 tree decl;
366 {
367 struct function *p;
368 for (p = outer_function_chain; p; p = p->next)
369 if (p->decl == decl)
370 return p;
371 abort ();
372 }
373
374 /* Save the current context for compilation of a nested function.
375 This is called from language-specific code.
376 The caller is responsible for saving any language-specific status,
377 since this function knows only about language-independent variables. */
378
379 void
380 push_function_context ()
381 {
382 struct function *p = (struct function *) xmalloc (sizeof (struct function));
383
384 p->next = outer_function_chain;
385 outer_function_chain = p;
386
387 p->name = current_function_name;
388 p->decl = current_function_decl;
389 p->pops_args = current_function_pops_args;
390 p->returns_struct = current_function_returns_struct;
391 p->returns_pcc_struct = current_function_returns_pcc_struct;
392 p->needs_context = current_function_needs_context;
393 p->calls_setjmp = current_function_calls_setjmp;
394 p->calls_longjmp = current_function_calls_longjmp;
395 p->calls_alloca = current_function_calls_alloca;
396 p->has_nonlocal_label = current_function_has_nonlocal_label;
397 p->args_size = current_function_args_size;
398 p->pretend_args_size = current_function_pretend_args_size;
399 p->arg_offset_rtx = current_function_arg_offset_rtx;
400 p->uses_const_pool = current_function_uses_const_pool;
401 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
402 p->internal_arg_pointer = current_function_internal_arg_pointer;
403 p->max_parm_reg = max_parm_reg;
404 p->parm_reg_stack_loc = parm_reg_stack_loc;
405 p->outgoing_args_size = current_function_outgoing_args_size;
406 p->return_rtx = current_function_return_rtx;
407 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
408 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
409 p->nonlocal_labels = nonlocal_labels;
410 p->cleanup_label = cleanup_label;
411 p->return_label = return_label;
412 p->save_expr_regs = save_expr_regs;
413 p->stack_slot_list = stack_slot_list;
414 p->parm_birth_insn = parm_birth_insn;
415 p->frame_offset = frame_offset;
416 p->tail_recursion_label = tail_recursion_label;
417 p->tail_recursion_reentry = tail_recursion_reentry;
418 p->arg_pointer_save_area = arg_pointer_save_area;
419 p->rtl_expr_chain = rtl_expr_chain;
420 p->last_parm_insn = last_parm_insn;
421 p->context_display = context_display;
422 p->trampoline_list = trampoline_list;
423 p->function_call_count = function_call_count;
424 p->temp_slots = temp_slots;
425 p->temp_slot_level = temp_slot_level;
426 p->fixup_var_refs_queue = 0;
427 p->epilogue_delay_list = current_function_epilogue_delay_list;
428
429 save_tree_status (p);
430 save_storage_status (p);
431 save_emit_status (p);
432 init_emit ();
433 save_expr_status (p);
434 save_stmt_status (p);
435 save_varasm_status (p);
436 }
437
438 /* Restore the last saved context, at the end of a nested function.
439 This function is called from language-specific code. */
440
441 void
442 pop_function_context ()
443 {
444 struct function *p = outer_function_chain;
445
446 outer_function_chain = p->next;
447
448 current_function_name = p->name;
449 current_function_decl = p->decl;
450 current_function_pops_args = p->pops_args;
451 current_function_returns_struct = p->returns_struct;
452 current_function_returns_pcc_struct = p->returns_pcc_struct;
453 current_function_needs_context = p->needs_context;
454 current_function_calls_setjmp = p->calls_setjmp;
455 current_function_calls_longjmp = p->calls_longjmp;
456 current_function_calls_alloca = p->calls_alloca;
457 current_function_has_nonlocal_label = p->has_nonlocal_label;
458 current_function_contains_functions = 1;
459 current_function_args_size = p->args_size;
460 current_function_pretend_args_size = p->pretend_args_size;
461 current_function_arg_offset_rtx = p->arg_offset_rtx;
462 current_function_uses_const_pool = p->uses_const_pool;
463 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
464 current_function_internal_arg_pointer = p->internal_arg_pointer;
465 max_parm_reg = p->max_parm_reg;
466 parm_reg_stack_loc = p->parm_reg_stack_loc;
467 current_function_outgoing_args_size = p->outgoing_args_size;
468 current_function_return_rtx = p->return_rtx;
469 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
470 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
471 nonlocal_labels = p->nonlocal_labels;
472 cleanup_label = p->cleanup_label;
473 return_label = p->return_label;
474 save_expr_regs = p->save_expr_regs;
475 stack_slot_list = p->stack_slot_list;
476 parm_birth_insn = p->parm_birth_insn;
477 frame_offset = p->frame_offset;
478 tail_recursion_label = p->tail_recursion_label;
479 tail_recursion_reentry = p->tail_recursion_reentry;
480 arg_pointer_save_area = p->arg_pointer_save_area;
481 rtl_expr_chain = p->rtl_expr_chain;
482 last_parm_insn = p->last_parm_insn;
483 context_display = p->context_display;
484 trampoline_list = p->trampoline_list;
485 function_call_count = p->function_call_count;
486 temp_slots = p->temp_slots;
487 temp_slot_level = p->temp_slot_level;
488 current_function_epilogue_delay_list = p->epilogue_delay_list;
489
490 restore_tree_status (p);
491 restore_storage_status (p);
492 restore_expr_status (p);
493 restore_emit_status (p);
494 restore_stmt_status (p);
495 restore_varasm_status (p);
496
497 /* Finish doing put_var_into_stack for any of our variables
498 which became addressable during the nested function. */
499 {
500 struct var_refs_queue *queue = p->fixup_var_refs_queue;
501 for (; queue; queue = queue->next)
502 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
503 }
504
505 free (p);
506
507 /* Reset variables that have known state during rtx generation. */
508 rtx_equal_function_value_matters = 1;
509 virtuals_instantiated = 0;
510 }
511 \f
512 /* Allocate fixed slots in the stack frame of the current function. */
513
514 /* Return size needed for stack frame based on slots so far allocated.
515 This size counts from zero. It is not rounded to STACK_BOUNDARY;
516 the caller may have to do that. */
517
518 int
519 get_frame_size ()
520 {
521 #ifdef FRAME_GROWS_DOWNWARD
522 return -frame_offset;
523 #else
524 return frame_offset;
525 #endif
526 }
527
528 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
529 with machine mode MODE.
530
531 ALIGN controls the amount of alignment for the address of the slot:
532 0 means according to MODE,
533 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
534 positive specifies alignment boundary in bits.
535
536 We do not round to stack_boundary here. */
537
538 rtx
539 assign_stack_local (mode, size, align)
540 enum machine_mode mode;
541 int size;
542 int align;
543 {
544 register rtx x, addr;
545 int bigend_correction = 0;
546 int alignment;
547
548 if (align == 0)
549 {
550 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
551 if (mode == BLKmode)
552 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
553 }
554 else if (align == -1)
555 {
556 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
557 size = CEIL_ROUND (size, alignment);
558 }
559 else
560 alignment = align / BITS_PER_UNIT;
561
562 /* Round frame offset to that alignment.
563 We must be careful here, since FRAME_OFFSET might be negative and
564 division with a negative dividend isn't as well defined as we might
565 like. So we instead assume that ALIGNMENT is a power of two and
566 use logical operations which are unambiguous. */
567 #ifdef FRAME_GROWS_DOWNWARD
568 frame_offset = FLOOR_ROUND (frame_offset, alignment);
569 #else
570 frame_offset = CEIL_ROUND (frame_offset, alignment);
571 #endif
572
573 /* On a big-endian machine, if we are allocating more space than we will use,
574 use the least significant bytes of those that are allocated. */
575 #if BYTES_BIG_ENDIAN
576 if (mode != BLKmode)
577 bigend_correction = size - GET_MODE_SIZE (mode);
578 #endif
579
580 #ifdef FRAME_GROWS_DOWNWARD
581 frame_offset -= size;
582 #endif
583
584 /* If we have already instantiated virtual registers, return the actual
585 address relative to the frame pointer. */
586 if (virtuals_instantiated)
587 addr = plus_constant (frame_pointer_rtx,
588 (frame_offset + bigend_correction
589 + STARTING_FRAME_OFFSET));
590 else
591 addr = plus_constant (virtual_stack_vars_rtx,
592 frame_offset + bigend_correction);
593
594 #ifndef FRAME_GROWS_DOWNWARD
595 frame_offset += size;
596 #endif
597
598 x = gen_rtx (MEM, mode, addr);
599
600 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
601
602 return x;
603 }
604
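/* Illustrative calls (the sizes here are arbitrary):

	x = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
	y = assign_stack_local (BLKmode, 32, -1);

   X is a word-sized slot aligned as SImode requires; Y is a BLKmode
   slot aligned to BIGGEST_ALIGNMENT, with its 32-byte size rounded up
   to a multiple of that alignment.  */
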
605 /* Assign a stack slot in a containing function.
606    The first three arguments are the same as in the preceding function.
607 The last argument specifies the function to allocate in. */
608
609 rtx
610 assign_outer_stack_local (mode, size, align, function)
611 enum machine_mode mode;
612 int size;
613 int align;
614 struct function *function;
615 {
616 register rtx x, addr;
617 int bigend_correction = 0;
618 int alignment;
619
620 /* Allocate in the memory associated with the function in whose frame
621 we are assigning. */
622 push_obstacks (function->function_obstack,
623 function->function_maybepermanent_obstack);
624
625 if (align == 0)
626 {
627 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
628 if (mode == BLKmode)
629 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
630 }
631 else if (align == -1)
632 {
633 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
634 size = CEIL_ROUND (size, alignment);
635 }
636 else
637 alignment = align / BITS_PER_UNIT;
638
639 /* Round frame offset to that alignment. */
640 #ifdef FRAME_GROWS_DOWNWARD
641 frame_offset = FLOOR_ROUND (frame_offset, alignment);
642 #else
643 frame_offset = CEIL_ROUND (frame_offset, alignment);
644 #endif
645
646 /* On a big-endian machine, if we are allocating more space than we will use,
647 use the least significant bytes of those that are allocated. */
648 #if BYTES_BIG_ENDIAN
649 if (mode != BLKmode)
650 bigend_correction = size - GET_MODE_SIZE (mode);
651 #endif
652
653 #ifdef FRAME_GROWS_DOWNWARD
654 function->frame_offset -= size;
655 #endif
656 addr = plus_constant (virtual_stack_vars_rtx,
657 function->frame_offset + bigend_correction);
658 #ifndef FRAME_GROWS_DOWNWARD
659 function->frame_offset += size;
660 #endif
661
662 x = gen_rtx (MEM, mode, addr);
663
664 function->stack_slot_list
665 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
666
667 pop_obstacks ();
668
669 return x;
670 }
671 \f
672 /* Allocate a temporary stack slot and record it for possible later
673 reuse.
674
675 MODE is the machine mode to be given to the returned rtx.
676
677 SIZE is the size in units of the space required. We do no rounding here
678 since assign_stack_local will do any required rounding.
679
680 KEEP is non-zero if this slot is to be retained after a call to
681 free_temp_slots. Automatic variables for a block are allocated with this
682 flag. */
683
684 rtx
685 assign_stack_temp (mode, size, keep)
686 enum machine_mode mode;
687 int size;
688 int keep;
689 {
690 struct temp_slot *p, *best_p = 0;
691
692 /* First try to find an available, already-allocated temporary that is the
693 exact size we require. */
694 for (p = temp_slots; p; p = p->next)
695 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
696 break;
697
698 /* If we didn't find one, try one that is larger than what we want.  We
699 find the smallest such. */
700 if (p == 0)
701 for (p = temp_slots; p; p = p->next)
702 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
703 && (best_p == 0 || best_p->size > p->size))
704 best_p = p;
705
706 /* Make our best, if any, the one to use. */
707 if (best_p)
708 p = best_p;
709
710 /* If we still didn't find one, make a new temporary. */
711 if (p == 0)
712 {
713 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
714 p->size = size;
715 /* If the temp slot mode doesn't indicate the alignment,
716 use the largest possible, so no one will be disappointed. */
717 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
718 p->next = temp_slots;
719 temp_slots = p;
720 }
721
722 p->in_use = 1;
723 p->level = temp_slot_level;
724 p->keep = keep;
725 return p->slot;
726 }
727 \f
728 /* If X could be a reference to a temporary slot, mark that slot as belonging
729    to the level one higher.  If X matched one of our slots, just mark that
730 one. Otherwise, we can't easily predict which it is, so upgrade all of
731 them. Kept slots need not be touched.
732
733 This is called when an ({...}) construct occurs and a statement
734 returns a value in memory. */
735
736 void
737 preserve_temp_slots (x)
738 rtx x;
739 {
740 struct temp_slot *p;
741
742 /* If X is not in memory or is at a constant address, it cannot be in
743 a temporary slot. */
744 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
745 return;
746
747 /* First see if we can find a match. */
748 for (p = temp_slots; p; p = p->next)
749 if (p->in_use && x == p->slot)
750 {
751 p->level--;
752 return;
753 }
754
755 /* Otherwise, preserve all non-kept slots at this level. */
756 for (p = temp_slots; p; p = p->next)
757 if (p->in_use && p->level == temp_slot_level && ! p->keep)
758 p->level--;
759 }
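
/* For example, in the GNU C statement expression

	x = ({ struct big b = f (); b; });

   the value of the construct may be sitting in the temporary slot that
   held B, so that slot must survive free_temp_slots at the end of the
   statement; the function above moves it up one level instead.  */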
760
761 /* Free all temporaries used so far. This is normally called at the end
762 of generating code for a statement. */
763
764 void
765 free_temp_slots ()
766 {
767 struct temp_slot *p;
768
769 for (p = temp_slots; p; p = p->next)
770 if (p->in_use && p->level == temp_slot_level && ! p->keep)
771 p->in_use = 0;
772 }
773
774 /* Push deeper into the nesting level for stack temporaries. */
775
776 void
777 push_temp_slots ()
778 {
779 /* For GNU C++, we must allow a sequence to be emitted anywhere in
780 the level where the sequence was started. By not changing levels
781 when the compiler is inside a sequence, the temporaries for the
782    sequence will not unwittingly conflict with the temporaries for
783    other sequences and/or code at that level.  */
784 if (in_sequence_p ())
785 return;
786
787 temp_slot_level++;
788 }
789
790 /* Pop a temporary nesting level. All slots in use in the current level
791 are freed. */
792
793 void
794 pop_temp_slots ()
795 {
796 struct temp_slot *p;
797
798 /* See comment in push_temp_slots about why we don't change levels
799 in sequences. */
800 if (in_sequence_p ())
801 return;
802
803 for (p = temp_slots; p; p = p->next)
804 if (p->in_use && p->level == temp_slot_level)
805 p->in_use = 0;
806
807 temp_slot_level--;
808 }
809 \f
810 /* Retroactively move an auto variable from a register to a stack slot.
811 This is done when an address-reference to the variable is seen. */
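/* A typical trigger, in source terms (illustrative):

	int i;
	int *p = &i;

   Until the `&' is seen, I may live in a pseudo register; once its
   address is needed, DECL_RTL must become a MEM, and every insn
   already emitted for I is rewritten via fixup_var_refs below.  */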
812
813 void
814 put_var_into_stack (decl)
815 tree decl;
816 {
817 register rtx reg;
818 register rtx new = 0;
819 enum machine_mode promoted_mode, decl_mode;
820 struct function *function = 0;
821 tree context = decl_function_context (decl);
822
823 /* Get the current rtl used for this object and its original mode.  */
824 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
825
826 /* No need to do anything if decl has no rtx yet
827 since in that case caller is setting TREE_ADDRESSABLE
828 and a stack slot will be assigned when the rtl is made. */
829 if (reg == 0)
830 return;
831
832 /* Get the declared mode for this object. */
833 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
834 : DECL_MODE (decl));
835 /* Get the mode it's actually stored in. */
836 promoted_mode = GET_MODE (reg);
837
838 /* If this variable comes from an outer function,
839 find that function's saved context. */
840 if (context != current_function_decl)
841 for (function = outer_function_chain; function; function = function->next)
842 if (function->decl == context)
843 break;
844
845 /* If this is a variable-size object with a pseudo to address it,
846 put that pseudo into the stack, if the var is nonlocal. */
847 if (DECL_NONLOCAL (decl)
848 && GET_CODE (reg) == MEM
849 && GET_CODE (XEXP (reg, 0)) == REG
850 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
851 {
852 reg = XEXP (reg, 0);
853 decl_mode = promoted_mode = GET_MODE (reg);
854 }
855 if (GET_CODE (reg) != REG)
856 return;
857
858 if (function)
859 {
860 if (REGNO (reg) < function->max_parm_reg)
861 new = function->parm_reg_stack_loc[REGNO (reg)];
862 if (new == 0)
863 new = assign_outer_stack_local (GET_MODE (reg),
864 GET_MODE_SIZE (decl_mode),
865 0, function);
866 }
867 else
868 {
869 if (REGNO (reg) < max_parm_reg)
870 new = parm_reg_stack_loc[REGNO (reg)];
871 if (new == 0)
872 new = assign_stack_local (GET_MODE (reg),
873 GET_MODE_SIZE (decl_mode), 0);
874 }
875
876 XEXP (reg, 0) = XEXP (new, 0);
877 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
878 REG_USERVAR_P (reg) = 0;
879 PUT_CODE (reg, MEM);
880 PUT_MODE (reg, decl_mode);
881
882 /* If this is a memory ref that contains aggregate components,
883 mark it as such for cse and loop optimize. */
884 MEM_IN_STRUCT_P (reg)
885 = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
886 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
887 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
888
889 /* Now make sure that all refs to the variable, previously made
890 when it was a register, are fixed up to be valid again. */
891 if (function)
892 {
893 struct var_refs_queue *temp;
894
895 /* Variable is inherited; fix it up when we get back to its function. */
896 push_obstacks (function->function_obstack,
897 function->function_maybepermanent_obstack);
898 temp
899 = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
900 temp->modified = reg;
901 temp->promoted_mode = promoted_mode;
902 temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl));
903 temp->next = function->fixup_var_refs_queue;
904 function->fixup_var_refs_queue = temp;
905 pop_obstacks ();
906 }
907 else
908 /* Variable is local; fix it up now. */
909 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl)));
910 }
911 \f
912 static void
913 fixup_var_refs (var, promoted_mode, unsignedp)
914 rtx var;
915 enum machine_mode promoted_mode;
916 int unsignedp;
917 {
918 tree pending;
919 rtx first_insn = get_insns ();
920 struct sequence_stack *stack = sequence_stack;
921 tree rtl_exps = rtl_expr_chain;
922
923 /* Must scan all insns for stack-refs that exceed the limit. */
924 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
925
926 /* Scan all pending sequences too. */
927 for (; stack; stack = stack->next)
928 {
929 push_to_sequence (stack->first);
930 fixup_var_refs_insns (var, promoted_mode, unsignedp,
931 stack->first, stack->next != 0);
932 /* Update remembered end of sequence
933 in case we added an insn at the end. */
934 stack->last = get_last_insn ();
935 end_sequence ();
936 }
937
938 /* Scan all waiting RTL_EXPRs too. */
939 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
940 {
941 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
942 if (seq != const0_rtx && seq != 0)
943 {
944 push_to_sequence (seq);
945 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
946 end_sequence ();
947 }
948 }
949 }
950 \f
951 /* This structure is used by the following two functions to record MEMs or
952 pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
953 VAR as an address. We need to maintain this list in case two operands of
954 an insn were required to match; in that case we must ensure we use the
955 same replacement. */
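/* For instance, a pattern containing (match_dup 0), schematically

	(set (match_operand 0 ...) (plus (match_dup 0) ...))

   must see the one replacement rtx substituted for VAR in both places;
   substituting two distinct pseudos would make the insn unrecognizable.
   (Illustrative fragment, not a real pattern from the md files.)  */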
956
957 struct fixup_replacement
958 {
959 rtx old;
960 rtx new;
961 struct fixup_replacement *next;
962 };
963
964 /* REPLACEMENTS is a pointer to a list of the above structures and X is
965 some part of an insn. Return a struct fixup_replacement whose OLD
966 value is equal to X. Allocate a new structure if no such entry exists. */
967
968 static struct fixup_replacement *
969 find_fixup_replacement (replacements, x)
970 struct fixup_replacement **replacements;
971 rtx x;
972 {
973 struct fixup_replacement *p;
974
975 /* See if we have already replaced this. */
976 for (p = *replacements; p && p->old != x; p = p->next)
977 ;
978
979 if (p == 0)
980 {
981 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
982 p->old = x;
983 p->new = 0;
984 p->next = *replacements;
985 *replacements = p;
986 }
987
988 return p;
989 }
990
991 /* Scan the insn-chain starting with INSN for refs to VAR
992 and fix them up. TOPLEVEL is nonzero if this chain is the
993 main chain of insns for the current function. */
994
995 static void
996 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
997 rtx var;
998 enum machine_mode promoted_mode;
999 int unsignedp;
1000 rtx insn;
1001 int toplevel;
1002 {
1003 while (insn)
1004 {
1005 rtx next = NEXT_INSN (insn);
1006 rtx note;
1007 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1008 || GET_CODE (insn) == JUMP_INSN)
1009 {
1010 /* The insn to load VAR from a home in the arglist
1011 is now a no-op. When we see it, just delete it. */
1012 if (toplevel
1013 && GET_CODE (PATTERN (insn)) == SET
1014 && SET_DEST (PATTERN (insn)) == var
1015 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1016 {
1017 next = delete_insn (insn);
1018 if (insn == last_parm_insn)
1019 last_parm_insn = PREV_INSN (next);
1020 }
1021 else
1022 {
1023 /* See if we have to do anything to INSN now that VAR is in
1024 memory. If it needs to be loaded into a pseudo, use a single
1025 pseudo for the entire insn in case there is a MATCH_DUP
1026 between two operands. We pass a pointer to the head of
1027 a list of struct fixup_replacements. If fixup_var_refs_1
1028 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1029 it will record them in this list.
1030
1031 If it allocated a pseudo for any replacement, we copy into
1032 it here. */
1033
1034 struct fixup_replacement *replacements = 0;
1035
1036 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1037 &replacements);
1038
1039 while (replacements)
1040 {
1041 if (GET_CODE (replacements->new) == REG)
1042 {
1043 rtx insert_before;
1044 rtx seq;
1045
1046 /* OLD might be a (subreg (mem)). */
1047 if (GET_CODE (replacements->old) == SUBREG)
1048 replacements->old
1049 = fixup_memory_subreg (replacements->old, insn, 0);
1050 else
1051 replacements->old
1052 = fixup_stack_1 (replacements->old, insn);
1053
1054 /* We can not separate USE insns from the CALL_INSN
1055 that they belong to. If this is a CALL_INSN, insert
1056 the move insn before the USE insns preceding it
1057 instead of immediately before the insn. */
1058 if (GET_CODE (insn) == CALL_INSN)
1059 {
1060 insert_before = insn;
1061 while (GET_CODE (PREV_INSN (insert_before)) == INSN
1062 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
1063 insert_before = PREV_INSN (insert_before);
1064 }
1065 else
1066 insert_before = insn;
1067
1068 /* If we are changing the mode, do a conversion.
1069 This might be wasteful, but combine.c will
1070 eliminate much of the waste. */
1071
1072 if (GET_MODE (replacements->new)
1073 != GET_MODE (replacements->old))
1074 {
1075 start_sequence ();
1076 convert_move (replacements->new,
1077 replacements->old, unsignedp);
1078 seq = gen_sequence ();
1079 end_sequence ();
1080 }
1081 else
1082 seq = gen_move_insn (replacements->new,
1083 replacements->old);
1084
1085 emit_insn_before (seq, insert_before);
1086 }
1087
1088 replacements = replacements->next;
1089 }
1090 }
1091
1092 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1093 But don't touch other insns referred to by reg-notes;
1094 we will get them elsewhere. */
1095 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1096 if (GET_CODE (note) != INSN_LIST)
1097 XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
1098 }
1099 insn = next;
1100 }
1101 }
1102 \f
1103 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1104 See if the rtx expression at *LOC in INSN needs to be changed.
1105
1106 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1107 contain a list of original rtx's and replacements. If we find that we need
1108 to modify this insn by replacing a memory reference with a pseudo or by
1109 making a new MEM to implement a SUBREG, we consult that list to see if
1110 we have already chosen a replacement. If none has already been allocated,
1111 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1112 or the SUBREG, as appropriate, to the pseudo. */
1113
1114 static void
1115 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1116 register rtx var;
1117 enum machine_mode promoted_mode;
1118 register rtx *loc;
1119 rtx insn;
1120 struct fixup_replacement **replacements;
1121 {
1122 register int i;
1123 register rtx x = *loc;
1124 RTX_CODE code = GET_CODE (x);
1125 register char *fmt;
1126 register rtx tem, tem1;
1127 struct fixup_replacement *replacement;
1128
1129 switch (code)
1130 {
1131 case MEM:
1132 if (var == x)
1133 {
1134 /* If we already have a replacement, use it. Otherwise,
1135 try to fix up this address in case it is invalid. */
1136
1137 replacement = find_fixup_replacement (replacements, var);
1138 if (replacement->new)
1139 {
1140 *loc = replacement->new;
1141 return;
1142 }
1143
1144 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1145
1146 /* Unless we are forcing memory to register or we changed the mode,
1147 we can leave things the way they are if the insn is valid. */
1148
1149 INSN_CODE (insn) = -1;
1150 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1151 && recog_memoized (insn) >= 0)
1152 return;
1153
1154 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1155 return;
1156 }
1157
1158 /* If X contains VAR, we need to unshare it here so that we update
1159 each occurrence separately. But all identical MEMs in one insn
1160 must be replaced with the same rtx because of the possibility of
1161 MATCH_DUPs. */
1162
1163 if (reg_mentioned_p (var, x))
1164 {
1165 replacement = find_fixup_replacement (replacements, x);
1166 if (replacement->new == 0)
1167 replacement->new = copy_most_rtx (x, var);
1168
1169 *loc = x = replacement->new;
1170 }
1171 break;
1172
1173 case REG:
1174 case CC0:
1175 case PC:
1176 case CONST_INT:
1177 case CONST:
1178 case SYMBOL_REF:
1179 case LABEL_REF:
1180 case CONST_DOUBLE:
1181 return;
1182
1183 case SIGN_EXTRACT:
1184 case ZERO_EXTRACT:
1185 /* Note that in some cases those types of expressions are altered
1186 by optimize_bit_field, and do not survive to get here. */
1187 if (XEXP (x, 0) == var
1188 || (GET_CODE (XEXP (x, 0)) == SUBREG
1189 && SUBREG_REG (XEXP (x, 0)) == var))
1190 {
1191 /* Get TEM as a valid MEM in the mode presently in the insn.
1192
1193 We don't worry about the possibility of MATCH_DUP here; it
1194 is highly unlikely and would be tricky to handle. */
1195
1196 tem = XEXP (x, 0);
1197 if (GET_CODE (tem) == SUBREG)
1198 tem = fixup_memory_subreg (tem, insn, 1);
1199 tem = fixup_stack_1 (tem, insn);
1200
1201 /* Unless we want to load from memory, get TEM into the proper mode
1202 for an extract from memory. This can only be done if the
1203 extract is at a constant position and length. */
1204
1205 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1206 && GET_CODE (XEXP (x, 2)) == CONST_INT
1207 && ! mode_dependent_address_p (XEXP (tem, 0))
1208 && ! MEM_VOLATILE_P (tem))
1209 {
1210 enum machine_mode wanted_mode = VOIDmode;
1211 enum machine_mode is_mode = GET_MODE (tem);
1212 int width = INTVAL (XEXP (x, 1));
1213 int pos = INTVAL (XEXP (x, 2));
1214
1215 #ifdef HAVE_extzv
1216 if (GET_CODE (x) == ZERO_EXTRACT)
1217 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1218 #endif
1219 #ifdef HAVE_extv
1220 if (GET_CODE (x) == SIGN_EXTRACT)
1221 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1222 #endif
1223 /* If we have a narrower mode, we can do something. */
1224 if (wanted_mode != VOIDmode
1225 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1226 {
1227 int offset = pos / BITS_PER_UNIT;
1228 rtx old_pos = XEXP (x, 2);
1229 rtx newmem;
1230
1231 /* If the bytes and bits are counted differently, we
1232 must adjust the offset. */
1233 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1234 offset = (GET_MODE_SIZE (is_mode)
1235 - GET_MODE_SIZE (wanted_mode) - offset);
1236 #endif
1237
1238 pos %= GET_MODE_BITSIZE (wanted_mode);
1239
1240 newmem = gen_rtx (MEM, wanted_mode,
1241 plus_constant (XEXP (tem, 0), offset));
1242 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1243 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1244 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1245
1246 /* Make the change and see if the insn remains valid. */
1247 INSN_CODE (insn) = -1;
1248 XEXP (x, 0) = newmem;
1249 XEXP (x, 2) = GEN_INT (pos);
1250
1251 if (recog_memoized (insn) >= 0)
1252 return;
1253
1254 /* Otherwise, restore old position. XEXP (x, 0) will be
1255 restored later. */
1256 XEXP (x, 2) = old_pos;
1257 }
1258 }
1259
1260 /* If we get here, the bitfield extract insn can't accept a memory
1261 reference. Copy the input into a register. */
1262
1263 tem1 = gen_reg_rtx (GET_MODE (tem));
1264 emit_insn_before (gen_move_insn (tem1, tem), insn);
1265 XEXP (x, 0) = tem1;
1266 return;
1267 }
1268 break;
1269
1270 case SUBREG:
1271 if (SUBREG_REG (x) == var)
1272 {
1273 /* If this is a special SUBREG made because VAR was promoted
1274 from a wider mode, replace it with VAR and call ourself
1275 recursively, this time saying that the object previously
1276 had its current mode (by virtue of the SUBREG). */
1277
1278 if (SUBREG_PROMOTED_VAR_P (x))
1279 {
1280 *loc = var;
1281 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1282 return;
1283 }
1284
1285 /* If this SUBREG makes VAR wider, it has become a paradoxical
1286 SUBREG with VAR in memory, but these aren't allowed at this
1287 stage of the compilation. So load VAR into a pseudo and take
1288 a SUBREG of that pseudo. */
1289 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1290 {
1291 replacement = find_fixup_replacement (replacements, var);
1292 if (replacement->new == 0)
1293 replacement->new = gen_reg_rtx (GET_MODE (var));
1294 SUBREG_REG (x) = replacement->new;
1295 return;
1296 }
1297
1298 /* See if we have already found a replacement for this SUBREG.
1299 If so, use it. Otherwise, make a MEM and see if the insn
1300 is recognized. If not, or if we should force MEM into a register,
1301 make a pseudo for this SUBREG. */
1302 replacement = find_fixup_replacement (replacements, x);
1303 if (replacement->new)
1304 {
1305 *loc = replacement->new;
1306 return;
1307 }
1308
1309 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1310
1311 if (! flag_force_mem && recog_memoized (insn) >= 0)
1312 return;
1313
1314 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1315 return;
1316 }
1317 break;
1318
1319 case SET:
1320 /* First do special simplification of bit-field references. */
1321 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1322 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1323 optimize_bit_field (x, insn, 0);
1324 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1325 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1326 optimize_bit_field (x, insn, NULL_PTR);
1327
1328 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1329 insn into a pseudo and store the low part of the pseudo into VAR. */
1330 if (GET_CODE (SET_DEST (x)) == SUBREG
1331 && SUBREG_REG (SET_DEST (x)) == var
1332 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1333 > GET_MODE_SIZE (GET_MODE (var))))
1334 {
1335 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1336 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1337 tem)),
1338 insn);
1339 break;
1340 }
1341
1342 {
1343 rtx dest = SET_DEST (x);
1344 rtx src = SET_SRC (x);
1345 rtx outerdest = dest;
1346
1347 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1348 || GET_CODE (dest) == SIGN_EXTRACT
1349 || GET_CODE (dest) == ZERO_EXTRACT)
1350 dest = XEXP (dest, 0);
1351
1352 if (GET_CODE (src) == SUBREG)
1353 src = XEXP (src, 0);
1354
1355 /* If VAR does not appear at the top level of the SET
1356 just scan the lower levels of the tree. */
1357
1358 if (src != var && dest != var)
1359 break;
1360
1361 /* We will need to rerecognize this insn. */
1362 INSN_CODE (insn) = -1;
1363
1364 #ifdef HAVE_insv
1365 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1366 {
1367 /* Since this case will return, ensure we fixup all the
1368 operands here. */
1369 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1370 insn, replacements);
1371 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1372 insn, replacements);
1373 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1374 insn, replacements);
1375
1376 tem = XEXP (outerdest, 0);
1377
1378 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1379 that may appear inside a ZERO_EXTRACT.
1380 This was legitimate when the MEM was a REG. */
1381 if (GET_CODE (tem) == SUBREG
1382 && SUBREG_REG (tem) == var)
1383 tem = fixup_memory_subreg (tem, insn, 1);
1384 else
1385 tem = fixup_stack_1 (tem, insn);
1386
1387 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1388 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1389 && ! mode_dependent_address_p (XEXP (tem, 0))
1390 && ! MEM_VOLATILE_P (tem))
1391 {
1392 enum machine_mode wanted_mode
1393 = insn_operand_mode[(int) CODE_FOR_insv][0];
1394 enum machine_mode is_mode = GET_MODE (tem);
1395 int width = INTVAL (XEXP (outerdest, 1));
1396 int pos = INTVAL (XEXP (outerdest, 2));
1397
1398 /* If we have a narrower mode, we can do something. */
1399 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1400 {
1401 int offset = pos / BITS_PER_UNIT;
1402 rtx old_pos = XEXP (outerdest, 2);
1403 rtx newmem;
1404
1405 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1406 offset = (GET_MODE_SIZE (is_mode)
1407 - GET_MODE_SIZE (wanted_mode) - offset);
1408 #endif
1409
1410 pos %= GET_MODE_BITSIZE (wanted_mode);
1411
1412 newmem = gen_rtx (MEM, wanted_mode,
1413 plus_constant (XEXP (tem, 0), offset));
1414 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1415 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1416 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1417
1418 /* Make the change and see if the insn remains valid. */
1419 INSN_CODE (insn) = -1;
1420 XEXP (outerdest, 0) = newmem;
1421 XEXP (outerdest, 2) = GEN_INT (pos);
1422
1423 if (recog_memoized (insn) >= 0)
1424 return;
1425
1426 /* Otherwise, restore old position.  XEXP (outerdest, 0) will be
1427 restored later. */
1428 XEXP (outerdest, 2) = old_pos;
1429 }
1430 }
1431
1432 /* If we get here, the bit-field store doesn't allow memory
1433 or isn't located at a constant position. Load the value into
1434 a register, do the store, and put it back into memory. */
1435
1436 tem1 = gen_reg_rtx (GET_MODE (tem));
1437 emit_insn_before (gen_move_insn (tem1, tem), insn);
1438 emit_insn_after (gen_move_insn (tem, tem1), insn);
1439 XEXP (outerdest, 0) = tem1;
1440 return;
1441 }
1442 #endif
1443
1444 /* STRICT_LOW_PART is a no-op on memory references
1445 and it can cause combinations to be unrecognizable,
1446 so eliminate it. */
1447
1448 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
1449 SET_DEST (x) = XEXP (SET_DEST (x), 0);
1450
1451 /* A valid insn to copy VAR into or out of a register
1452 must be left alone, to avoid an infinite loop here.
1453 If the reference to VAR is by a subreg, fix that up,
1454 since SUBREG is not valid for a memref.
1455 Also fix up the address of the stack slot. */
1456
1457 if ((SET_SRC (x) == var
1458 || (GET_CODE (SET_SRC (x)) == SUBREG
1459 && SUBREG_REG (SET_SRC (x)) == var))
1460 && (GET_CODE (SET_DEST (x)) == REG
1461 || (GET_CODE (SET_DEST (x)) == SUBREG
1462 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1463 && recog_memoized (insn) >= 0)
1464 {
1465 replacement = find_fixup_replacement (replacements, SET_SRC (x));
1466 if (replacement->new)
1467 {
1468 SET_SRC (x) = replacement->new;
1469 return;
1470 }
1471 else if (GET_CODE (SET_SRC (x)) == SUBREG)
1472 SET_SRC (x) = replacement->new
1473 = fixup_memory_subreg (SET_SRC (x), insn, 0);
1474 else
1475 SET_SRC (x) = replacement->new
1476 = fixup_stack_1 (SET_SRC (x), insn);
1477 return;
1478 }
1479
1480 if ((SET_DEST (x) == var
1481 || (GET_CODE (SET_DEST (x)) == SUBREG
1482 && SUBREG_REG (SET_DEST (x)) == var))
1483 && (GET_CODE (SET_SRC (x)) == REG
1484 || (GET_CODE (SET_SRC (x)) == SUBREG
1485 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1486 && recog_memoized (insn) >= 0)
1487 {
1488 if (GET_CODE (SET_DEST (x)) == SUBREG)
1489 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
1490 else
1491 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
1492 return;
1493 }
1494
1495 /* Otherwise, storing into VAR must be handled specially
1496 by storing into a temporary and copying that into VAR
1497 with a new insn after this one. Note that this case
1498 will be used when storing into a promoted scalar since
1499 the insn will now have different modes on the input
1500 and output and hence will be invalid (except for the case
1501 of setting it to a constant, which does not need any
1502 change if it is valid). We generate extra code in that case,
1503 but combine.c will eliminate it. */
1504
1505 if (dest == var)
1506 {
1507 rtx temp;
1508 rtx fixeddest = SET_DEST (x);
1509
1510 /* STRICT_LOW_PART can be discarded around a MEM.  */
1511 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
1512 fixeddest = XEXP (fixeddest, 0);
1513 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
1514 if (GET_CODE (fixeddest) == SUBREG)
1515 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
1516 else
1517 fixeddest = fixup_stack_1 (fixeddest, insn);
1518
1519 temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
1520 ? GET_MODE (fixeddest)
1521 : GET_MODE (SET_SRC (x)));
1522
1523 emit_insn_after (gen_move_insn (fixeddest,
1524 gen_lowpart (GET_MODE (fixeddest),
1525 temp)),
1526 insn);
1527
1528 SET_DEST (x) = temp;
1529 }
1530 }
1531 }
1532
1533 /* Nothing special about this RTX; fix its operands. */
1534
1535 fmt = GET_RTX_FORMAT (code);
1536 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1537 {
1538 if (fmt[i] == 'e')
1539 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
1540 if (fmt[i] == 'E')
1541 {
1542 register int j;
1543 for (j = 0; j < XVECLEN (x, i); j++)
1544 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
1545 insn, replacements);
1546 }
1547 }
1548 }
1549 \f
1550 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1551 return an rtx (MEM:m1 newaddr) which is equivalent.
1552 If any insns must be emitted to compute NEWADDR, put them before INSN.
1553
1554 UNCRITICAL nonzero means accept paradoxical subregs.
1555 This is used for subregs found inside of ZERO_EXTRACTs. */
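/* For example, on a little-endian machine

	(subreg:QI (mem:SI (reg:SI fp)) 0)

   becomes (mem:QI (reg:SI fp)); on a big-endian 32-bit machine the
   address is instead offset by 3 so the narrower MEM still names the
   same byte.  (Illustrative RTL; the arithmetic is just below.)  */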
1556
1557 static rtx
1558 fixup_memory_subreg (x, insn, uncritical)
1559 rtx x;
1560 rtx insn;
1561 int uncritical;
1562 {
1563 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1564 rtx addr = XEXP (SUBREG_REG (x), 0);
1565 enum machine_mode mode = GET_MODE (x);
1566 rtx saved, result;
1567
1568 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1569 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1570 && ! uncritical)
1571 abort ();
1572
1573 #if BYTES_BIG_ENDIAN
1574 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1575 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1576 #endif
1577 addr = plus_constant (addr, offset);
1578 if (!flag_force_addr && memory_address_p (mode, addr))
1579 /* Shortcut if no insns need be emitted. */
1580 return change_address (SUBREG_REG (x), mode, addr);
1581 start_sequence ();
1582 result = change_address (SUBREG_REG (x), mode, addr);
1583 emit_insn_before (gen_sequence (), insn);
1584 end_sequence ();
1585 return result;
1586 }
1587
1588 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1589 Replace subexpressions of X in place.
1590 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1591 Otherwise return X, with its contents possibly altered.
1592
1593 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1594
1595 static rtx
1596 walk_fixup_memory_subreg (x, insn)
1597 register rtx x;
1598 rtx insn;
1599 {
1600 register enum rtx_code code;
1601 register char *fmt;
1602 register int i;
1603
1604 if (x == 0)
1605 return 0;
1606
1607 code = GET_CODE (x);
1608
1609 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1610 return fixup_memory_subreg (x, insn, 0);
1611
1612 /* Nothing special about this RTX; fix its operands. */
1613
1614 fmt = GET_RTX_FORMAT (code);
1615 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1616 {
1617 if (fmt[i] == 'e')
1618 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1619 if (fmt[i] == 'E')
1620 {
1621 register int j;
1622 for (j = 0; j < XVECLEN (x, i); j++)
1623 XVECEXP (x, i, j)
1624 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1625 }
1626 }
1627 return x;
1628 }
1629 \f
1630 #if 0
1631 /* Fix up any references to stack slots that are invalid memory addresses
1632 because they exceed the maximum range of a displacement. */
1633
1634 void
1635 fixup_stack_slots ()
1636 {
1637 register rtx insn;
1638
1639 /* Did we generate a stack slot that is out of range
1640 or otherwise has an invalid address? */
1641 if (invalid_stack_slot)
1642 {
1643 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
1644 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1645 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1646 || GET_CODE (insn) == JUMP_INSN)
1647 fixup_stack_1 (PATTERN (insn), insn);
1648 }
1649 }
1650 #endif
1651
1652 /* For each memory ref within X, if it refers to a stack slot
1653 with an out of range displacement, put the address in a temp register
1654 (emitting new insns before INSN to load these registers)
1655 and alter the memory ref to use that register.
1656 Replace each such MEM rtx with a copy, to avoid clobberage. */
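/* For example, if the displacement in

	(mem:SI (plus:SI (reg virtual-stack-vars) (const_int 4000)))

   is out of range for the target's addressing modes, the sum is first
   computed into a pseudo and the MEM rewritten to address through that
   register.  (Illustrative RTL; addresses already valid are returned
   unchanged.)  */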
1657
1658 static rtx
1659 fixup_stack_1 (x, insn)
1660 rtx x;
1661 rtx insn;
1662 {
1663 register int i;
1664 register RTX_CODE code = GET_CODE (x);
1665 register char *fmt;
1666
1667 if (code == MEM)
1668 {
1669 register rtx ad = XEXP (x, 0);
1670 /* If we have address of a stack slot but it's not valid
1671 (displacement is too large), compute the sum in a register. */
1672 if (GET_CODE (ad) == PLUS
1673 && GET_CODE (XEXP (ad, 0)) == REG
1674 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1675 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1676 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1677 {
1678 rtx temp, seq;
1679 if (memory_address_p (GET_MODE (x), ad))
1680 return x;
1681
1682 start_sequence ();
1683 temp = copy_to_reg (ad);
1684 seq = gen_sequence ();
1685 end_sequence ();
1686 emit_insn_before (seq, insn);
1687 return change_address (x, VOIDmode, temp);
1688 }
1689 return x;
1690 }
1691
1692 fmt = GET_RTX_FORMAT (code);
1693 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1694 {
1695 if (fmt[i] == 'e')
1696 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1697 if (fmt[i] == 'E')
1698 {
1699 register int j;
1700 for (j = 0; j < XVECLEN (x, i); j++)
1701 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1702 }
1703 }
1704 return x;
1705 }
1706 \f
1707 /* Optimization: a bit-field instruction whose field
1708 happens to be a byte or halfword in memory
1709 can be changed to a move instruction.
1710
1711 We call here when INSN is an insn to examine or store into a bit-field.
1712 BODY is the SET-rtx to be altered.
1713
1714 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1715 (Currently this is called only from function.c, and EQUIV_MEM
1716 is always 0.) */
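/* A hedged example of the idea (not from the original source): with
   8-bit bytes on a little-endian machine, the aligned one-byte store

       (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
            (reg:SI 60))

   can become a plain QImode move to the byte at ADDR plus 1,

       (set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI 60) 0))

   where ADDR stands for any valid address.  */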
1717
1718 static void
1719 optimize_bit_field (body, insn, equiv_mem)
1720 rtx body;
1721 rtx insn;
1722 rtx *equiv_mem;
1723 {
1724 register rtx bitfield;
1725 int destflag;
1726 rtx seq = 0;
1727 enum machine_mode mode;
1728
1729 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1730 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1731 bitfield = SET_DEST (body), destflag = 1;
1732 else
1733 bitfield = SET_SRC (body), destflag = 0;
1734
1735 /* First check that the field being stored has constant size and position
1736 and is in fact a byte or halfword suitably aligned. */
1737
1738 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1739 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1740 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1741 != BLKmode)
1742 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1743 {
1744 register rtx memref = 0;
1745
1746 /* Now check that the containing word is memory, not a register,
1747 and that it is safe to change the machine mode. */
1748
1749 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1750 memref = XEXP (bitfield, 0);
1751 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1752 && equiv_mem != 0)
1753 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1754 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1755 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1756 memref = SUBREG_REG (XEXP (bitfield, 0));
1757 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1758 && equiv_mem != 0
1759 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1760 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1761
1762 if (memref
1763 && ! mode_dependent_address_p (XEXP (memref, 0))
1764 && ! MEM_VOLATILE_P (memref))
1765 {
1766 /* Now adjust the address, first for any subreg'ing
1767 that we are now getting rid of,
1768 and then for which byte of the word is wanted. */
1769
1770 register int offset = INTVAL (XEXP (bitfield, 2));
1771 /* Adjust OFFSET to count bits from low-address byte. */
1772 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1773 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1774 - offset - INTVAL (XEXP (bitfield, 1)));
1775 #endif
1776 /* Adjust OFFSET to count bytes from low-address byte. */
1777 offset /= BITS_PER_UNIT;
1778 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1779 {
1780 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1781 #if BYTES_BIG_ENDIAN
1782 offset -= (MIN (UNITS_PER_WORD,
1783 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1784 - MIN (UNITS_PER_WORD,
1785 GET_MODE_SIZE (GET_MODE (memref))));
1786 #endif
1787 }
1788
1789 memref = change_address (memref, mode,
1790 plus_constant (XEXP (memref, 0), offset));
1791
1792 /* Store this memory reference where
1793 we found the bit field reference. */
1794
1795 if (destflag)
1796 {
1797 validate_change (insn, &SET_DEST (body), memref, 1);
1798 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1799 {
1800 rtx src = SET_SRC (body);
1801 while (GET_CODE (src) == SUBREG
1802 && SUBREG_WORD (src) == 0)
1803 src = SUBREG_REG (src);
1804 if (GET_MODE (src) != GET_MODE (memref))
1805 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1806 validate_change (insn, &SET_SRC (body), src, 1);
1807 }
1808 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1809 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1810 /* This shouldn't happen because anything that didn't have
1811 one of these modes should have been converted explicitly
1812 and then referenced through a subreg.
1813 This is so because the original bit-field was
1814 handled by agg_mode and so its tree structure had
1815 the same mode that memref now has. */
1816 abort ();
1817 }
1818 else
1819 {
1820 rtx dest = SET_DEST (body);
1821
1822 while (GET_CODE (dest) == SUBREG
1823 && SUBREG_WORD (dest) == 0)
1824 dest = SUBREG_REG (dest);
1825
1826 validate_change (insn, &SET_DEST (body), dest, 1);
1827
1828 if (GET_MODE (dest) == GET_MODE (memref))
1829 validate_change (insn, &SET_SRC (body), memref, 1);
1830 else
1831 {
1832 /* Convert the mem ref to the destination mode. */
1833 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1834
1835 start_sequence ();
1836 convert_move (newreg, memref,
1837 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1838 seq = get_insns ();
1839 end_sequence ();
1840
1841 validate_change (insn, &SET_SRC (body), newreg, 1);
1842 }
1843 }
1844
1845 /* See if we can convert this extraction or insertion into
1846 a simple move insn. We might not be able to do so if this
1847 was, for example, part of a PARALLEL.
1848
1849 If we succeed, write out any needed conversions. If we fail,
1850 it is hard to guess why we failed, so don't do anything
1851 special; just let the optimization be suppressed. */
1852
1853 if (apply_change_group () && seq)
1854 emit_insns_before (seq, insn);
1855 }
1856 }
1857 }
1858 \f
1859 /* These routines are responsible for converting virtual register references
1860 to the actual hard register references once RTL generation is complete.
1861
1862 The following four variables are used for communication between the
1863 routines. They contain the offsets of the virtual registers from their
1864 respective hard registers. */
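/* An illustrative sketch (not part of the original source): a reference
   generated during RTL expansion, say

       (mem:SI (plus (reg virtual-stack-vars) (const_int 8)))

   is rewritten, once the four offsets below are computed, into

       (mem:SI (plus (reg frame-pointer) (const_int var_offset + 8)))

   and likewise for the incoming-args, stack-dynamic, and outgoing-args
   virtual registers.  */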
1865
1866 static int in_arg_offset;
1867 static int var_offset;
1868 static int dynamic_offset;
1869 static int out_arg_offset;
1870
1871 /* In most machines, the stack pointer register is equivalent to the bottom
1872 of the stack. */
1873
1874 #ifndef STACK_POINTER_OFFSET
1875 #define STACK_POINTER_OFFSET 0
1876 #endif
1877
1878 /* If not defined, pick an appropriate default for the offset of dynamically
1879 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1880 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1881
1882 #ifndef STACK_DYNAMIC_OFFSET
1883
1884 #ifdef ACCUMULATE_OUTGOING_ARGS
1885 /* The bottom of the stack points to the actual arguments. If
1886 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1887 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1888 stack space for register parameters is not pushed by the caller, but
1889 rather is part of the fixed stack areas and hence not included in
1890 `current_function_outgoing_args_size'. Nevertheless, we must allow
1891 for it when allocating dynamic stack objects. */
1892
1893 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1894 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1895 (current_function_outgoing_args_size \
1896 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1897
1898 #else
1899 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1900 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1901 #endif
1902
1903 #else
1904 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1905 #endif
1906 #endif
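/* For instance (a hypothetical configuration, not from this file): with
   ACCUMULATE_OUTGOING_ARGS, REG_PARM_STACK_SPACE (FNDECL) == 24,
   current_function_outgoing_args_size == 16, and STACK_POINTER_OFFSET == 0,
   the default above yields STACK_DYNAMIC_OFFSET (FNDECL) == 40, i.e.
   dynamically allocated memory begins 40 bytes beyond the stack pointer.  */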
1907
1908 /* Pass through the INSNS of function FNDECL and convert virtual register
1909 references to hard register references. */
1910
1911 void
1912 instantiate_virtual_regs (fndecl, insns)
1913 tree fndecl;
1914 rtx insns;
1915 {
1916 rtx insn;
1917
1918 /* Compute the offsets to use for this function. */
1919 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1920 var_offset = STARTING_FRAME_OFFSET;
1921 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1922 out_arg_offset = STACK_POINTER_OFFSET;
1923
1924 /* Scan all variables and parameters of this function. For each that is
1925 in memory, instantiate all virtual registers if the result is a valid
1926 address. If not, we do it later. That will handle most uses of virtual
1927 regs on many machines. */
1928 instantiate_decls (fndecl, 1);
1929
1930 /* Initialize recognition, indicating that volatile is OK. */
1931 init_recog ();
1932
1933 /* Scan through all the insns, instantiating every virtual register still
1934 present. */
1935 for (insn = insns; insn; insn = NEXT_INSN (insn))
1936 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1937 || GET_CODE (insn) == CALL_INSN)
1938 {
1939 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1940 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1941 }
1942
1943 /* Now instantiate the remaining register equivalences for debugging info.
1944 These will not be valid addresses. */
1945 instantiate_decls (fndecl, 0);
1946
1947 /* Indicate that, from now on, assign_stack_local should use
1948 frame_pointer_rtx. */
1949 virtuals_instantiated = 1;
1950 }
1951
1952 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1953 all virtual registers in their DECL_RTL's.
1954
1955 If VALID_ONLY, do this only if the resulting address is still valid.
1956 Otherwise, always do it. */
1957
1958 static void
1959 instantiate_decls (fndecl, valid_only)
1960 tree fndecl;
1961 int valid_only;
1962 {
1963 tree decl;
1964
1965 if (DECL_INLINE (fndecl))
1966 /* When compiling an inline function, the obstack used for
1967 rtl allocation is the maybepermanent_obstack. Calling
1968 `resume_temporary_allocation' switches us back to that
1969 obstack while we process this function's parameters. */
1970 resume_temporary_allocation ();
1971
1972 /* Process all parameters of the function. */
1973 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1974 {
1975 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
1976 valid_only);
1977 instantiate_decl (DECL_INCOMING_RTL (decl),
1978 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
1979 }
1980
1981 /* Now process all variables defined in the function or its subblocks. */
1982 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1983
1984 if (DECL_INLINE (fndecl))
1985 {
1986 /* Save all rtl allocated for this function by raising the
1987 high-water mark on the maybepermanent_obstack. */
1988 preserve_data ();
1989 /* All further rtl allocation is now done in the current_obstack. */
1990 rtl_in_current_obstack ();
1991 }
1992 }
1993
1994 /* Subroutine of instantiate_decls: Process all decls in the given
1995 BLOCK node and all its subblocks. */
1996
1997 static void
1998 instantiate_decls_1 (let, valid_only)
1999 tree let;
2000 int valid_only;
2001 {
2002 tree t;
2003
2004 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2005 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2006 valid_only);
2007
2008 /* Process all subblocks. */
2009 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2010 instantiate_decls_1 (t, valid_only);
2011 }
2012
2013 /* Subroutine of the preceding procedures: Given RTL representing a
2014 decl and the size of the object, do any instantiation required.
2015
2016 If VALID_ONLY is non-zero, it means that the RTL should only be
2017 changed if the new address is valid. */
2018
2019 static void
2020 instantiate_decl (x, size, valid_only)
2021 rtx x;
2022 int size;
2023 int valid_only;
2024 {
2025 enum machine_mode mode;
2026 rtx addr;
2027
2028 /* If this is not a MEM, no need to do anything. Similarly if the
2029 address is a constant or a register that is not a virtual register. */
2030
2031 if (x == 0 || GET_CODE (x) != MEM)
2032 return;
2033
2034 addr = XEXP (x, 0);
2035 if (CONSTANT_P (addr)
2036 || (GET_CODE (addr) == REG
2037 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2038 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2039 return;
2040
2041 /* If we should only do this if the address is valid, copy the address.
2042 We need to do this so we can undo any changes that might make the
2043 address invalid. This copy is unfortunate, but probably can't be
2044 avoided. */
2045
2046 if (valid_only)
2047 addr = copy_rtx (addr);
2048
2049 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2050
2051 if (! valid_only)
2052 return;
2053
2054 /* Now verify that the resulting address is valid for every integer or
2055 floating-point mode up to and including SIZE bytes long. We do this
2056 since the object might be accessed in any mode and frame addresses
2057 are shared. */
2058
2059 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2060 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2061 mode = GET_MODE_WIDER_MODE (mode))
2062 if (! memory_address_p (mode, addr))
2063 return;
2064
2065 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2066 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2067 mode = GET_MODE_WIDER_MODE (mode))
2068 if (! memory_address_p (mode, addr))
2069 return;
2070
2071 /* Otherwise, put back the address, now that we have updated it and we
2072 know it is valid. */
2073
2074 XEXP (x, 0) = addr;
2075 }
2076 \f
2077 /* Given a pointer to a piece of rtx and an optional pointer to the
2078 containing object, instantiate any virtual registers present in it.
2079
2080 If EXTRA_INSNS is nonzero, we always do the replacement and generate
2081 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2082 is not valid.
2083
2084 Return 1 if we either had nothing to do or if we were able to do the
2085 needed replacement. Return 0 otherwise; we only return zero if
2086 EXTRA_INSNS is zero.
2087
2088 We first try some simple transformations to avoid the creation of extra
2089 pseudos. */
2090
2091 static int
2092 instantiate_virtual_regs_1 (loc, object, extra_insns)
2093 rtx *loc;
2094 rtx object;
2095 int extra_insns;
2096 {
2097 rtx x;
2098 RTX_CODE code;
2099 rtx new = 0;
2100 int offset;
2101 rtx temp;
2102 rtx seq;
2103 int i, j;
2104 char *fmt;
2105
2106 /* Re-start here to avoid recursion in common cases. */
2107 restart:
2108
2109 x = *loc;
2110 if (x == 0)
2111 return 1;
2112
2113 code = GET_CODE (x);
2114
2115 /* Check for some special cases. */
2116 switch (code)
2117 {
2118 case CONST_INT:
2119 case CONST_DOUBLE:
2120 case CONST:
2121 case SYMBOL_REF:
2122 case CODE_LABEL:
2123 case PC:
2124 case CC0:
2125 case ASM_INPUT:
2126 case ADDR_VEC:
2127 case ADDR_DIFF_VEC:
2128 case RETURN:
2129 return 1;
2130
2131 case SET:
2132 /* We are allowed to set the virtual registers. This means that
2133 the actual register should receive the source minus the
2134 appropriate offset. This is used, for example, in the handling
2135 of non-local gotos. */
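/* Illustration (not from the original source): if dynamic_offset is 12,
   then (set (reg virtual-stack-dynamic) (reg:SI 70)) is rewritten so
   that the stack pointer is set to (reg:SI 70) minus 12.  */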
2136 if (SET_DEST (x) == virtual_incoming_args_rtx)
2137 new = arg_pointer_rtx, offset = - in_arg_offset;
2138 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2139 new = frame_pointer_rtx, offset = - var_offset;
2140 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2141 new = stack_pointer_rtx, offset = - dynamic_offset;
2142 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2143 new = stack_pointer_rtx, offset = - out_arg_offset;
2144
2145 if (new)
2146 {
2147 /* The only valid sources here are PLUS or REG. Just do
2148 the simplest possible thing to handle them. */
2149 if (GET_CODE (SET_SRC (x)) != REG
2150 && GET_CODE (SET_SRC (x)) != PLUS)
2151 abort ();
2152
2153 start_sequence ();
2154 if (GET_CODE (SET_SRC (x)) != REG)
2155 temp = force_operand (SET_SRC (x), NULL_RTX);
2156 else
2157 temp = SET_SRC (x);
2158 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2159 seq = get_insns ();
2160 end_sequence ();
2161
2162 emit_insns_before (seq, object);
2163 SET_DEST (x) = new;
2164
2165 if (!validate_change (object, &SET_SRC (x), temp, 0)
2166 || ! extra_insns)
2167 abort ();
2168
2169 return 1;
2170 }
2171
2172 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2173 loc = &SET_SRC (x);
2174 goto restart;
2175
2176 case PLUS:
2177 /* Handle special case of virtual register plus constant. */
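/* E.g. (a sketch): (plus (reg virtual-stack-vars) (const_int 4))
   becomes (plus (reg frame-pointer) (const_int var_offset + 4)),
   with the two constants folded when the result is still valid.  */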
2178 if (CONSTANT_P (XEXP (x, 1)))
2179 {
2180 rtx old;
2181
2182 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2183 if (GET_CODE (XEXP (x, 0)) == PLUS)
2184 {
2185 rtx inner = XEXP (XEXP (x, 0), 0);
2186
2187 if (inner == virtual_incoming_args_rtx)
2188 new = arg_pointer_rtx, offset = in_arg_offset;
2189 else if (inner == virtual_stack_vars_rtx)
2190 new = frame_pointer_rtx, offset = var_offset;
2191 else if (inner == virtual_stack_dynamic_rtx)
2192 new = stack_pointer_rtx, offset = dynamic_offset;
2193 else if (inner == virtual_outgoing_args_rtx)
2194 new = stack_pointer_rtx, offset = out_arg_offset;
2195 else
2196 {
2197 loc = &XEXP (x, 0);
2198 goto restart;
2199 }
2200
2201 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2202 extra_insns);
2203 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2204 }
2205
2206 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2207 new = arg_pointer_rtx, offset = in_arg_offset;
2208 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2209 new = frame_pointer_rtx, offset = var_offset;
2210 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2211 new = stack_pointer_rtx, offset = dynamic_offset;
2212 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2213 new = stack_pointer_rtx, offset = out_arg_offset;
2214 else
2215 {
2216 /* We know the second operand is a constant. Unless the
2217 first operand is a REG (which has already been checked),
2218 it needs to be checked. */
2219 if (GET_CODE (XEXP (x, 0)) != REG)
2220 {
2221 loc = &XEXP (x, 0);
2222 goto restart;
2223 }
2224 return 1;
2225 }
2226
2227 old = XEXP (x, 0);
2228 XEXP (x, 0) = new;
2229 new = plus_constant (XEXP (x, 1), offset);
2230
2231 /* If the new constant is zero, try to replace the sum with its
2232 first operand. */
2233 if (new == const0_rtx
2234 && validate_change (object, loc, XEXP (x, 0), 0))
2235 return 1;
2236
2237 /* Next try to replace constant with new one. */
2238 if (!validate_change (object, &XEXP (x, 1), new, 0))
2239 {
2240 if (! extra_insns)
2241 {
2242 XEXP (x, 0) = old;
2243 return 0;
2244 }
2245
2246 /* Otherwise copy the new constant into a register and replace
2247 constant with that register. */
2248 temp = gen_reg_rtx (Pmode);
2249 if (validate_change (object, &XEXP (x, 1), temp, 0))
2250 emit_insn_before (gen_move_insn (temp, new), object);
2251 else
2252 {
2253 /* If that didn't work, replace this expression with a
2254 register containing the sum. */
2255
2256 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2257 XEXP (x, 0) = old;
2258
2259 start_sequence ();
2260 temp = force_operand (new, NULL_RTX);
2261 seq = get_insns ();
2262 end_sequence ();
2263
2264 emit_insns_before (seq, object);
2265 if (! validate_change (object, loc, temp, 0)
2266 && ! validate_replace_rtx (x, temp, object))
2267 abort ();
2268 }
2269 }
2270
2271 return 1;
2272 }
2273
2274 /* Fall through to generic two-operand expression case. */
2275 case EXPR_LIST:
2276 case CALL:
2277 case COMPARE:
2278 case MINUS:
2279 case MULT:
2280 case DIV: case UDIV:
2281 case MOD: case UMOD:
2282 case AND: case IOR: case XOR:
2283 case LSHIFT: case ASHIFT: case ROTATE:
2284 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2285 case NE: case EQ:
2286 case GE: case GT: case GEU: case GTU:
2287 case LE: case LT: case LEU: case LTU:
2288 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2289 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2290 loc = &XEXP (x, 0);
2291 goto restart;
2292
2293 case MEM:
2294 /* Most cases of MEM that convert to valid addresses have already been
2295 handled by our scan of regno_reg_rtx. The only special handling we
2296 need here is to make a copy of the rtx to ensure it isn't being
2297 shared if we have to change it to a pseudo.
2298
2299 If the rtx is a simple reference to an address via a virtual register,
2300 it can potentially be shared. In such cases, first try to make it
2301 a valid address, which can also be shared. Otherwise, copy it and
2302 proceed normally.
2303
2304 First check for common cases that need no processing. These are
2305 usually due to instantiation already being done on a previous instance
2306 of a shared rtx. */
2307
2308 temp = XEXP (x, 0);
2309 if (CONSTANT_ADDRESS_P (temp)
2310 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2311 || temp == arg_pointer_rtx
2312 #endif
2313 || temp == frame_pointer_rtx)
2314 return 1;
2315
2316 if (GET_CODE (temp) == PLUS
2317 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2318 && (XEXP (temp, 0) == frame_pointer_rtx
2319 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2320 || XEXP (temp, 0) == arg_pointer_rtx
2321 #endif
2322 ))
2323 return 1;
2324
2325 if (temp == virtual_stack_vars_rtx
2326 || temp == virtual_incoming_args_rtx
2327 || (GET_CODE (temp) == PLUS
2328 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2329 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2330 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2331 {
2332 /* This MEM may be shared. If the substitution can be done without
2333 the need to generate new pseudos, we want to do it in place
2334 so all copies of the shared rtx benefit. The call below will
2335 only make substitutions if the resulting address is still
2336 valid.
2337
2338 Note that we cannot pass X as the object in the recursive call
2339 since the insn being processed may not allow all valid
2340 addresses. However, if we were not passed an object, we can
2341 only modify X without copying it if X will have a valid
2342 address.
2343
2344 ??? Also note that this can still lose if OBJECT is an insn that
2345 has fewer restrictions on an address than some other insn.
2346 In that case, we will modify the shared address. This case
2347 doesn't seem very likely, though. */
2348
2349 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2350 object ? object : x, 0))
2351 return 1;
2352
2353 /* Otherwise make a copy and process that copy. We copy the entire
2354 RTL expression since it might be a PLUS which could also be
2355 shared. */
2356 *loc = x = copy_rtx (x);
2357 }
2358
2359 /* Fall through to generic unary operation case. */
2360 case USE:
2361 case CLOBBER:
2362 case SUBREG:
2363 case STRICT_LOW_PART:
2364 case NEG: case NOT:
2365 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2366 case SIGN_EXTEND: case ZERO_EXTEND:
2367 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2368 case FLOAT: case FIX:
2369 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2370 case ABS:
2371 case SQRT:
2372 case FFS:
2373 /* These cases either have just one operand or we know that we need not
2374 check the rest of the operands. */
2375 loc = &XEXP (x, 0);
2376 goto restart;
2377
2378 case REG:
2379 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2380 in front of this insn and substitute the temporary. */
2381 if (x == virtual_incoming_args_rtx)
2382 new = arg_pointer_rtx, offset = in_arg_offset;
2383 else if (x == virtual_stack_vars_rtx)
2384 new = frame_pointer_rtx, offset = var_offset;
2385 else if (x == virtual_stack_dynamic_rtx)
2386 new = stack_pointer_rtx, offset = dynamic_offset;
2387 else if (x == virtual_outgoing_args_rtx)
2388 new = stack_pointer_rtx, offset = out_arg_offset;
2389
2390 if (new)
2391 {
2392 temp = plus_constant (new, offset);
2393 if (!validate_change (object, loc, temp, 0))
2394 {
2395 if (! extra_insns)
2396 return 0;
2397
2398 start_sequence ();
2399 temp = force_operand (temp, NULL_RTX);
2400 seq = get_insns ();
2401 end_sequence ();
2402
2403 emit_insns_before (seq, object);
2404 if (! validate_change (object, loc, temp, 0)
2405 && ! validate_replace_rtx (x, temp, object))
2406 abort ();
2407 }
2408 }
2409
2410 return 1;
2411 }
2412
2413 /* Scan all subexpressions. */
2414 fmt = GET_RTX_FORMAT (code);
2415 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2416 if (*fmt == 'e')
2417 {
2418 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2419 return 0;
2420 }
2421 else if (*fmt == 'E')
2422 for (j = 0; j < XVECLEN (x, i); j++)
2423 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2424 extra_insns))
2425 return 0;
2426
2427 return 1;
2428 }
2429 \f
2430 /* Optimization: assuming this function does not receive nonlocal gotos,
2431 delete the handlers for such, as well as the insns to establish
2432 and disestablish them. */
2433
2434 static void
2435 delete_handlers ()
2436 {
2437 rtx insn;
2438 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2439 {
2440 /* Delete the handler by turning off the flag that would
2441 prevent jump_optimize from deleting it.
2442 Also permit deletion of the nonlocal labels themselves
2443 if nothing local refers to them. */
2444 if (GET_CODE (insn) == CODE_LABEL)
2445 LABEL_PRESERVE_P (insn) = 0;
2446 if (GET_CODE (insn) == INSN
2447 && ((nonlocal_goto_handler_slot != 0
2448 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2449 || (nonlocal_goto_stack_level != 0
2450 && reg_mentioned_p (nonlocal_goto_stack_level,
2451 PATTERN (insn)))))
2452 delete_insn (insn);
2453 }
2454 }
2455
2456 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2457 of the current function. */
2458
2459 rtx
2460 nonlocal_label_rtx_list ()
2461 {
2462 tree t;
2463 rtx x = 0;
2464
2465 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2466 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2467
2468 return x;
2469 }
2470 \f
2471 /* Output a USE for any register use in RTL.
2472 This is used with -noreg to mark the extent of the lifespan
2473 of any registers used in a user-visible variable's DECL_RTL. */
2474
2475 void
2476 use_variable (rtl)
2477 rtx rtl;
2478 {
2479 if (GET_CODE (rtl) == REG)
2480 /* This is a register variable. */
2481 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2482 else if (GET_CODE (rtl) == MEM
2483 && GET_CODE (XEXP (rtl, 0)) == REG
2484 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2485 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2486 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2487 /* This is a variable-sized structure. */
2488 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2489 }
2490
2491 /* Like use_variable except that it outputs the USEs after INSN
2492 instead of at the end of the insn-chain. */
2493
2494 void
2495 use_variable_after (rtl, insn)
2496 rtx rtl, insn;
2497 {
2498 if (GET_CODE (rtl) == REG)
2499 /* This is a register variable. */
2500 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2501 else if (GET_CODE (rtl) == MEM
2502 && GET_CODE (XEXP (rtl, 0)) == REG
2503 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2504 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2505 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2506 /* This is a variable-sized structure. */
2507 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2508 }
2509 \f
2510 int
2511 max_parm_reg_num ()
2512 {
2513 return max_parm_reg;
2514 }
2515
2516 /* Return the first insn following those generated by `assign_parms'. */
2517
2518 rtx
2519 get_first_nonparm_insn ()
2520 {
2521 if (last_parm_insn)
2522 return NEXT_INSN (last_parm_insn);
2523 return get_insns ();
2524 }
2525
2526 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
2527 Crash if there is none. */
2528
2529 rtx
2530 get_first_block_beg ()
2531 {
2532 register rtx searcher;
2533 register rtx insn = get_first_nonparm_insn ();
2534
2535 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
2536 if (GET_CODE (searcher) == NOTE
2537 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
2538 return searcher;
2539
2540 abort (); /* Invalid call to this function. (See comments above.) */
2541 return NULL_RTX;
2542 }
2543
2544 /* Return 1 if EXP returns an aggregate value, for which an address
2545 must be passed to the function or returned by the function. */
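/* For instance (an illustration, not from the original source): under
   -fpcc-struct-return every RECORD_TYPE or UNION_TYPE value answers 1
   here, so even `struct { int i; }' is returned in memory although it
   would fit in a register.  */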
2546
2547 int
2548 aggregate_value_p (exp)
2549 tree exp;
2550 {
2551 int i, regno, nregs;
2552 rtx reg;
2553 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2554 return 1;
2555 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2556 return 1;
2557 if (flag_pcc_struct_return
2558 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2559 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2560 return 1;
2561 /* Make sure we have suitable call-clobbered regs to return
2562 the value in; if not, we must return it in memory. */
2563 reg = hard_function_value (TREE_TYPE (exp), 0);
2564 regno = REGNO (reg);
2565 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp)));
2566 for (i = 0; i < nregs; i++)
2567 if (! call_used_regs[regno + i])
2568 return 1;
2569 return 0;
2570 }
2571 \f
2572 /* Assign RTL expressions to the function's parameters.
2573 This may involve copying them into registers and using
2574 those registers as the RTL for them.
2575
2576 If SECOND_TIME is non-zero it means that this function is being
2577 called a second time. This is done by integrate.c when a function's
2578 compilation is deferred. We need to come back here in case the
2579 FUNCTION_ARG macro computes items needed for the rest of the compilation
2580 (such as changing which registers are fixed or caller-saved). But suppress
2581 writing any insns or setting DECL_RTL of anything in this case. */
2582
2583 void
2584 assign_parms (fndecl, second_time)
2585 tree fndecl;
2586 int second_time;
2587 {
2588 register tree parm;
2589 register rtx entry_parm = 0;
2590 register rtx stack_parm = 0;
2591 CUMULATIVE_ARGS args_so_far;
2592 enum machine_mode promoted_mode, passed_mode, nominal_mode;
2593 int unsignedp;
2594 /* Total space needed so far for args on the stack,
2595 given as a constant and a tree-expression. */
2596 struct args_size stack_args_size;
2597 tree fntype = TREE_TYPE (fndecl);
2598 tree fnargs = DECL_ARGUMENTS (fndecl);
2599 /* This is used for the arg pointer when referring to stack args. */
2600 rtx internal_arg_pointer;
2601 /* This is a dummy PARM_DECL that we use for the function result if
2602 the function returns a structure. */
2603 tree function_result_decl = 0;
2604 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2605 int varargs_setup = 0;
2606
2607 /* Nonzero if the last arg is named `__builtin_va_alist',
2608 which is used on some machines for old-fashioned non-ANSI varargs.h;
2609 this should be stuck onto the stack as if it had arrived there. */
2610 int vararg
2611 = (fnargs
2612 && (parm = tree_last (fnargs)) != 0
2613 && DECL_NAME (parm)
2614 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2615 "__builtin_va_alist")));
2616
2617 /* Nonzero if function takes extra anonymous args.
2618 This means the last named arg must be on the stack
2619 right before the anonymous ones. */
2620 int stdarg
2621 = (TYPE_ARG_TYPES (fntype) != 0
2622 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2623 != void_type_node));
2624
2625 /* If the reg that the virtual arg pointer will be translated into is
2626 not a fixed reg or is the stack pointer, make a copy of the virtual
2627 arg pointer, and address parms via the copy. The frame pointer is
2628 considered fixed even though it is not marked as such.
2629
2630 The second time through, simply use ap to avoid generating rtx. */
2631
2632 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2633 || ! (fixed_regs[ARG_POINTER_REGNUM]
2634 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2635 && ! second_time)
2636 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2637 else
2638 internal_arg_pointer = virtual_incoming_args_rtx;
2639 current_function_internal_arg_pointer = internal_arg_pointer;
2640
2641 stack_args_size.constant = 0;
2642 stack_args_size.var = 0;
2643
2644 /* If struct value address is treated as the first argument, make it so. */
2645 if (aggregate_value_p (DECL_RESULT (fndecl))
2646 && ! current_function_returns_pcc_struct
2647 && struct_value_incoming_rtx == 0)
2648 {
2649 tree type = build_pointer_type (fntype);
2650
2651 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2652
2653 DECL_ARG_TYPE (function_result_decl) = type;
2654 TREE_CHAIN (function_result_decl) = fnargs;
2655 fnargs = function_result_decl;
2656 }
2657
2658 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2659 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2660
2661 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2662 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
2663 #else
2664 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
2665 #endif
2666
2667 /* We haven't yet found an argument that we must push and pretend the
2668 caller did. */
2669 current_function_pretend_args_size = 0;
2670
2671 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2672 {
2673 int aggregate
2674 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2675 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2676 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2677 struct args_size stack_offset;
2678 struct args_size arg_size;
2679 int passed_pointer = 0;
2680 tree passed_type = DECL_ARG_TYPE (parm);
2681
2682 /* Set LAST_NAMED if this is last named arg before some
2683 anonymous args. We treat it as if it were anonymous too. */
2684 int last_named = ((TREE_CHAIN (parm) == 0
2685 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2686 && (vararg || stdarg));
2687
2688 if (TREE_TYPE (parm) == error_mark_node
2689 /* This can happen after weird syntax errors
2690 or if an enum type is defined among the parms. */
2691 || TREE_CODE (parm) != PARM_DECL
2692 || passed_type == NULL)
2693 {
2694 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
2695 const0_rtx);
2696 TREE_USED (parm) = 1;
2697 continue;
2698 }
2699
2700 /* For varargs.h function, save info about regs and stack space
2701 used by the individual args, not including the va_alist arg. */
2702 if (vararg && last_named)
2703 current_function_args_info = args_so_far;
2704
2705 /* Find mode of arg as it is passed, and mode of arg
2706 as it should be during execution of this function. */
2707 passed_mode = TYPE_MODE (passed_type);
2708 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2709
2710 /* If the parm's mode is VOID, its value doesn't matter;
2711 avoid the usual things like emit_move_insn that could crash. */
2712 if (nominal_mode == VOIDmode)
2713 {
2714 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2715 continue;
2716 }
2717
2718 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2719 /* See if this arg was passed by invisible reference. */
2720 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2721 passed_type, ! last_named))
2722 {
2723 passed_type = build_pointer_type (passed_type);
2724 passed_pointer = 1;
2725 passed_mode = nominal_mode = Pmode;
2726 }
2727 #endif
2728
2729 promoted_mode = passed_mode;
2730
2731 #ifdef PROMOTE_FUNCTION_ARGS
2732 /* Compute the mode to which the arg is actually extended. */
2733 if (TREE_CODE (passed_type) == INTEGER_TYPE
2734 || TREE_CODE (passed_type) == ENUMERAL_TYPE
2735 || TREE_CODE (passed_type) == BOOLEAN_TYPE
2736 || TREE_CODE (passed_type) == CHAR_TYPE
2737 || TREE_CODE (passed_type) == REAL_TYPE
2738 || TREE_CODE (passed_type) == POINTER_TYPE
2739 || TREE_CODE (passed_type) == OFFSET_TYPE)
2740 {
2741 unsignedp = TREE_UNSIGNED (passed_type);
2742 PROMOTE_MODE (promoted_mode, unsignedp, passed_type);
2743 }
2744 #endif
2745
2746 /* Let machine desc say which reg (if any) the parm arrives in.
2747 0 means it arrives on the stack. */
2748 #ifdef FUNCTION_INCOMING_ARG
2749 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
2750 passed_type, ! last_named);
2751 #else
2752 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
2753 passed_type, ! last_named);
2754 #endif
2755
2756 if (entry_parm)
2757 passed_mode = promoted_mode;
2758
2759 #ifdef SETUP_INCOMING_VARARGS
2760 /* If this is the last named parameter, do any required setup for
2761 varargs or stdargs. We need to know about the case of this being an
2762 addressable type, in which case we skip the registers it
2763 would have arrived in.
2764
2765 For stdargs, LAST_NAMED will be set for two parameters, the one that
2766 is actually the last named, and the dummy parameter. We only
2767 want to do this action once.
2768
2769 Also, indicate when RTL generation is to be suppressed. */
2770 if (last_named && !varargs_setup)
2771 {
2772 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2773 current_function_pretend_args_size,
2774 second_time);
2775 varargs_setup = 1;
2776 }
2777 #endif
2778
2779 /* Determine parm's home in the stack,
2780 in case it arrives in the stack or we should pretend it did.
2781
2782 Compute the stack position and rtx where the argument arrives
2783 and its size.
2784
2785 There is one complexity here: If this was a parameter that would
2786 have been passed in registers, but wasn't only because it is
2787 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2788 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2789 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2790 0 as it was the previous time. */
2791
2792 locate_and_pad_parm (passed_mode, passed_type,
2793 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2794 1,
2795 #else
2796 #ifdef FUNCTION_INCOMING_ARG
2797 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2798 passed_type,
2799 (! last_named
2800 || varargs_setup)) != 0,
2801 #else
2802 FUNCTION_ARG (args_so_far, passed_mode,
2803 passed_type,
2804 ! last_named || varargs_setup) != 0,
2805 #endif
2806 #endif
2807 fndecl, &stack_args_size, &stack_offset, &arg_size);
2808
2809 if (! second_time)
2810 {
2811 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2812
2813 if (offset_rtx == const0_rtx)
2814 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2815 else
2816 stack_parm = gen_rtx (MEM, passed_mode,
2817 gen_rtx (PLUS, Pmode,
2818 internal_arg_pointer, offset_rtx));
2819
2820 /* If this is a memory ref that contains aggregate components,
2821 mark it as such for cse and loop optimize. */
2822 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2823 }
2824
2825 /* If this parameter was passed both in registers and in the stack,
2826 use the copy on the stack. */
2827 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2828 entry_parm = 0;
2829
2830 /* If this parm was passed part in regs and part in memory,
2831 pretend it arrived entirely in memory
2832 by pushing the register-part onto the stack.
2833
2834 In the special case of a DImode or DFmode that is split,
2835 we could put it together in a pseudoreg directly,
2836 but for now that's not worth bothering with. */
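/* A worked example with assumed values: if 3 words of the parm arrived
   in registers, UNITS_PER_WORD == 4, and PARM_BOUNDARY == 64 bits
   (8 bytes), then the 12 register bytes round up to a pretend size of
   16 bytes in the computation below.  */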
2837
2838 if (entry_parm)
2839 {
2840 int nregs = 0;
2841 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2842 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2843 passed_type, ! last_named);
2844 #endif
2845
2846 if (nregs > 0)
2847 {
2848 current_function_pretend_args_size
2849 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2850 / (PARM_BOUNDARY / BITS_PER_UNIT)
2851 * (PARM_BOUNDARY / BITS_PER_UNIT));
2852
2853 if (! second_time)
2854 move_block_from_reg (REGNO (entry_parm),
2855 validize_mem (stack_parm), nregs);
2856 entry_parm = stack_parm;
2857 }
2858 }
2859
2860 /* If we didn't decide this parm came in a register,
2861 by default it came on the stack. */
2862 if (entry_parm == 0)
2863 entry_parm = stack_parm;
2864
2865 /* Record permanently how this parm was passed. */
2866 if (! second_time)
2867 DECL_INCOMING_RTL (parm) = entry_parm;
2868
2869 /* If there is actually space on the stack for this parm,
2870 count it in stack_args_size; otherwise set stack_parm to 0
2871 to indicate there is no preallocated stack slot for the parm. */
2872
2873 if (entry_parm == stack_parm
2874 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
2875 /* On some machines, even if a parm value arrives in a register
2876 there is still an (uninitialized) stack slot allocated for it.
2877
2878 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2879 whether this parameter already has a stack slot allocated,
2880 because an arg block exists only if current_function_args_size
2881 is larger than some threshold, and we haven't calculated that
2882 yet. So, for now, we just assume that stack slots never exist
2883 in this case. */
2884 || REG_PARM_STACK_SPACE (fndecl) > 0
2885 #endif
2886 )
2887 {
2888 stack_args_size.constant += arg_size.constant;
2889 if (arg_size.var)
2890 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2891 }
2892 else
2893 /* No stack slot was pushed for this parm. */
2894 stack_parm = 0;
2895
2896 /* Update info on where next arg arrives in registers. */
2897
2898 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2899 passed_type, ! last_named);
2900
2901 /* If this is our second time through, we are done with this parm. */
2902 if (second_time)
2903 continue;
2904
2905 /* If we can't trust the parm stack slot to be aligned enough
2906 for its ultimate type, don't use that slot after entry.
2907 We'll make another stack slot, if we need one. */
2908 {
2909 #ifdef FUNCTION_ARG_BOUNDARY
2910 int thisparm_boundary
2911 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2912 #else
2913 int thisparm_boundary = PARM_BOUNDARY;
2914 #endif
2915
2916 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2917 stack_parm = 0;
2918 }
2919
2920 /* Now adjust STACK_PARM to the mode and precise location
2921 where this parameter should live during execution,
2922 if we discover that it must live in the stack during execution.
2923 To make debuggers happier on big-endian machines, we store
2924 the value in the last bytes of the space available. */
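/* E.g. (a sketch for a big-endian machine): a char parm passed as a
   4-byte int has its stack offset advanced by 4 - 1 = 3 bytes below,
   so the QImode value occupies the last byte of its slot.  */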
2925
2926 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2927 && stack_parm != 0)
2928 {
2929 rtx offset_rtx;
2930
2931 #if BYTES_BIG_ENDIAN
2932 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2933 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2934 - GET_MODE_SIZE (nominal_mode));
2935 #endif
2936
2937 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2938 if (offset_rtx == const0_rtx)
2939 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2940 else
2941 stack_parm = gen_rtx (MEM, nominal_mode,
2942 gen_rtx (PLUS, Pmode,
2943 internal_arg_pointer, offset_rtx));
2944
2945 /* If this is a memory ref that contains aggregate components,
2946 mark it as such for cse and loop optimize. */
2947 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2948 }
2949
2950 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2951 in the mode in which it arrives.
2952 STACK_PARM is an RTX for a stack slot where the parameter can live
2953 during the function (in case we want to put it there).
2954 STACK_PARM is 0 if no stack slot was pushed for it.
2955
2956 Now output code if necessary to convert ENTRY_PARM to
2957 the type in which this function declares it,
2958 and store that result in an appropriate place,
2959 which may be a pseudo reg, may be STACK_PARM,
2960 or may be a local stack slot if STACK_PARM is 0.
2961
2962 Set DECL_RTL to that place. */
2963
2964 if (nominal_mode == BLKmode)
2965 {
2966 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2967 if (GET_CODE (entry_parm) == REG)
2968 {
2969 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2970 UNITS_PER_WORD);
2971
2972 /* Note that we will be storing an integral number of words.
2973 So we have to be careful to ensure that we allocate an
2974 integral number of words. We do this below in the
2975 assign_stack_local if space was not allocated in the argument
2976 list. If it was, this will not work if PARM_BOUNDARY is not
2977 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2978 if it becomes a problem. */
2979
2980 if (stack_parm == 0)
2981 {
2982 stack_parm
2983 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2984 /* If this is a memory ref that contains aggregate components,
2985 mark it as such for cse and loop optimize. */
2986 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2987 }
2988
2989 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2990 abort ();
2991
2992 move_block_from_reg (REGNO (entry_parm),
2993 validize_mem (stack_parm),
2994 size_stored / UNITS_PER_WORD);
2995 }
2996 DECL_RTL (parm) = stack_parm;
2997 }
2998 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
2999 && ! DECL_INLINE (fndecl))
3000 /* layout_decl may set this. */
3001 || TREE_ADDRESSABLE (parm)
3002 || TREE_SIDE_EFFECTS (parm)
3003 /* If -ffloat-store specified, don't put explicit
3004 float variables into registers. */
3005 || (flag_float_store
3006 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3007 /* Always assign pseudo to structure return or item passed
3008 by invisible reference. */
3009 || passed_pointer || parm == function_result_decl)
3010 {
3011 /* Store the parm in a pseudoregister during the function, but we
3012 may need to do it in a wider mode. */
3013
3014 register rtx parmreg;
3015
3016 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3017 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
3018 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
3019 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
3020 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
3021 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
3022 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
3023 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
3024 {
3025 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
3026 }
3027
3028 parmreg = gen_reg_rtx (nominal_mode);
3029 REG_USERVAR_P (parmreg) = 1;
3030
3031 /* If this was an item that we received a pointer to, set DECL_RTL
3032 appropriately. */
3033 if (passed_pointer)
3034 {
3035 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3036 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3037 }
3038 else
3039 DECL_RTL (parm) = parmreg;
3040
3041 /* Copy the value into the register. */
3042 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
3043 {
3044 /* If ENTRY_PARM is a hard register, it might be in a register
3045 not valid for operating in its mode (e.g., an odd-numbered
3046 register for a DFmode). In that case, moves are the only
3047 thing valid, so we can't do a convert from there. This
3048 occurs when the calling sequence allows such misaligned
3049 usages. */
3050 if (GET_CODE (entry_parm) == REG
3051 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
3052 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
3053 GET_MODE (entry_parm)))
3054 convert_move (parmreg, copy_to_reg (entry_parm), unsignedp);
3055 else
3056 convert_move (parmreg, validize_mem (entry_parm), unsignedp);
3057 }
3058 else
3059 emit_move_insn (parmreg, validize_mem (entry_parm));
3060
3061 /* If we were passed a pointer but the actual value
3062 can safely live in a register, put it in one. */
3063 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3064 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3065 && ! DECL_INLINE (fndecl))
3066 /* layout_decl may set this. */
3067 || TREE_ADDRESSABLE (parm)
3068 || TREE_SIDE_EFFECTS (parm)
3069 /* If -ffloat-store specified, don't put explicit
3070 float variables into registers. */
3071 || (flag_float_store
3072 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3073 {
3074 /* We can't use nominal_mode, because it will have been set to
3075 Pmode above. We must use the actual mode of the parm. */
3076 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3077 emit_move_insn (parmreg, DECL_RTL (parm));
3078 DECL_RTL (parm) = parmreg;
3079 }
3080
3081 /* In any case, record the parm's desired stack location
3082 in case we later discover it must live in the stack. */
3083 if (REGNO (parmreg) >= nparmregs)
3084 {
3085 rtx *new;
3086 new = (rtx *) oballoc ((REGNO (parmreg) + 5) * sizeof (rtx));
3087 bzero (new, (REGNO (parmreg) + 5) * sizeof (rtx));
3087 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
3088 nparmregs = REGNO (parmreg) + 5;
3089 parm_reg_stack_loc = new;
3090 }
3091 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3092
3093 /* Mark the register as eliminable if we did no conversion
3094 and it was copied from memory at a fixed offset,
3095 and the arg pointer was not copied to a pseudo-reg.
3096 If the arg pointer is a pseudo reg or the offset formed
3097 an invalid address, such memory-equivalences
3098 as we make here would screw up life analysis for it. */
3099 if (nominal_mode == passed_mode
3100 && GET_CODE (entry_parm) == MEM
3101 && entry_parm == stack_parm
3102 && stack_offset.var == 0
3103 && reg_mentioned_p (virtual_incoming_args_rtx,
3104 XEXP (entry_parm, 0)))
3105 REG_NOTES (get_last_insn ())
3106 = gen_rtx (EXPR_LIST, REG_EQUIV,
3107 entry_parm, REG_NOTES (get_last_insn ()));
3108
3109 /* For pointer data type, suggest pointer register. */
3110 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3111 mark_reg_pointer (parmreg);
3112 }
3113 else
3114 {
3115 /* Value must be stored in the stack slot STACK_PARM
3116 during function execution. */
3117
3118 if (passed_mode != nominal_mode)
3119 {
3120 /* Conversion is required. */
3121 if (GET_CODE (entry_parm) == REG
3122 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
3123 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
3124 entry_parm = copy_to_reg (entry_parm);
3125
3126 entry_parm = convert_to_mode (nominal_mode, entry_parm,
3127 TREE_UNSIGNED (TREE_TYPE (parm)));
3128 }
3129
3130 if (entry_parm != stack_parm)
3131 {
3132 if (stack_parm == 0)
3133 {
3134 stack_parm
3135 = assign_stack_local (GET_MODE (entry_parm),
3136 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3137 /* If this is a memory ref that contains aggregate components,
3138 mark it as such for cse and loop optimize. */
3139 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3140 }
3141
3142 emit_move_insn (validize_mem (stack_parm),
3143 validize_mem (entry_parm));
3144 }
3145
3146 DECL_RTL (parm) = stack_parm;
3147 }
3148
3149 /* If this "parameter" was the place where we are receiving the
3150 function's incoming structure pointer, set up the result. */
3151 if (parm == function_result_decl)
3152 DECL_RTL (DECL_RESULT (fndecl))
3153 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3154
3155 if (TREE_THIS_VOLATILE (parm))
3156 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3157 if (TREE_READONLY (parm))
3158 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3159 }
3160
3161 max_parm_reg = max_reg_num ();
3162 last_parm_insn = get_last_insn ();
3163
3164 current_function_args_size = stack_args_size.constant;
3165
3166 /* Adjust function incoming argument size for alignment and
3167 minimum length. */
3168
3169 #ifdef REG_PARM_STACK_SPACE
3170 #ifndef MAYBE_REG_PARM_STACK_SPACE
3171 current_function_args_size = MAX (current_function_args_size,
3172 REG_PARM_STACK_SPACE (fndecl));
3173 #endif
3174 #endif
3175
3176 #ifdef STACK_BOUNDARY
3177 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3178
3179 current_function_args_size
3180 = ((current_function_args_size + STACK_BYTES - 1)
3181 / STACK_BYTES) * STACK_BYTES;
3182 #endif
3183
3184 #ifdef ARGS_GROW_DOWNWARD
3185 current_function_arg_offset_rtx
3186 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3187 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3188 size_int (-stack_args_size.constant)),
3189 NULL_RTX, VOIDmode, 0));
3190 #else
3191 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3192 #endif
3193
3194 /* See how many bytes, if any, of its args a function should try to pop
3195 on return. */
3196
3197 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3198 current_function_args_size);
3199
3200 /* For stdarg.h function, save info about regs and stack space
3201 used by the named args. */
3202
3203 if (stdarg)
3204 current_function_args_info = args_so_far;
3205
3206 /* Set the rtx used for the function return value. Put this in its
3207 own variable so any optimizers that need this information don't have
3208 to include tree.h. Do this here so it gets done when an inlined
3209 function gets output. */
3210
3211 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3212 }
3213 \f
3214 /* Compute the size and offset from the start of the stacked arguments for a
3215 parm passed in mode PASSED_MODE and with type TYPE.
3216
3217 INITIAL_OFFSET_PTR points to the current offset into the stacked
3218 arguments.
3219
3220 The starting offset and size for this parm are returned in *OFFSET_PTR
3221 and *ARG_SIZE_PTR, respectively.
3222
3223 IN_REGS is non-zero if the argument will be passed in registers. It will
3224 never be set if REG_PARM_STACK_SPACE is not defined.
3225
3226 FNDECL is the function in which the argument was defined.
3227
3228 There are two types of rounding that are done. The first, controlled by
3229 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3230 list to be aligned to the specific boundary (in bits). This rounding
3231 affects the initial and starting offsets, but not the argument size.
3232
3233 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3234 optionally rounds the size of the parm to PARM_BOUNDARY. The
3235 initial offset is not affected by this rounding, while the size always
3236 is and the starting offset may be. */
3237
3238 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
3239 initial_offset_ptr is positive because locate_and_pad_parm's
3240 callers pass in the total size of args so far as
3241 initial_offset_ptr. arg_size_ptr is always positive. */
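/* A worked example under assumed values (not from this file): take
   PARM_BOUNDARY == 32, BITS_PER_UNIT == 8, and an HImode parm (2 bytes)
   whose FUNCTION_ARG_BOUNDARY is 32 and which is padded upward. The
   first rounding aligns *INITIAL_OFFSET_PTR up to a multiple of 4 bytes;
   the second pads the parm's size from 2 to 4 bytes, so *ARG_SIZE_PTR
   becomes 4 while *OFFSET_PTR keeps the 4-byte-aligned starting offset.  */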
3242
3243 static void pad_to_arg_alignment (), pad_below ();
3244
3245 void
3246 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3247 initial_offset_ptr, offset_ptr, arg_size_ptr)
3248 enum machine_mode passed_mode;
3249 tree type;
3250 int in_regs;
3251 tree fndecl;
3252 struct args_size *initial_offset_ptr;
3253 struct args_size *offset_ptr;
3254 struct args_size *arg_size_ptr;
3255 {
3256 tree sizetree
3257 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3258 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3259 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3260 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3261 int reg_parm_stack_space = 0;
3262
3263 #ifdef REG_PARM_STACK_SPACE
3264 /* If we have found a stack parm before we reach the end of the
3265 area reserved for registers, skip that area. */
3266 if (! in_regs)
3267 {
3268 #ifdef MAYBE_REG_PARM_STACK_SPACE
3269 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3270 #else
3271 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3272 #endif
3273 if (reg_parm_stack_space > 0)
3274 {
3275 if (initial_offset_ptr->var)
3276 {
3277 initial_offset_ptr->var
3278 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3279 size_int (reg_parm_stack_space));
3280 initial_offset_ptr->constant = 0;
3281 }
3282 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3283 initial_offset_ptr->constant = reg_parm_stack_space;
3284 }
3285 }
3286 #endif /* REG_PARM_STACK_SPACE */
3287
3288 arg_size_ptr->var = 0;
3289 arg_size_ptr->constant = 0;
3290
3291 #ifdef ARGS_GROW_DOWNWARD
3292 if (initial_offset_ptr->var)
3293 {
3294 offset_ptr->constant = 0;
3295 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3296 initial_offset_ptr->var);
3297 }
3298 else
3299 {
3300 offset_ptr->constant = - initial_offset_ptr->constant;
3301 offset_ptr->var = 0;
3302 }
3303 if (where_pad == upward
3304 && (TREE_CODE (sizetree) != INTEGER_CST
3305 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3306 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3307 SUB_PARM_SIZE (*offset_ptr, sizetree);
3308 pad_to_arg_alignment (offset_ptr, boundary);
3309 if (initial_offset_ptr->var)
3310 {
3311 arg_size_ptr->var = size_binop (MINUS_EXPR,
3312 size_binop (MINUS_EXPR,
3313 integer_zero_node,
3314 initial_offset_ptr->var),
3315 offset_ptr->var);
3316 }
3317 else
3318 {
3319 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3320 offset_ptr->constant);
3321 }
3322 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3323 if (where_pad == downward)
3324 pad_below (arg_size_ptr, passed_mode, sizetree);
3325 #else /* !ARGS_GROW_DOWNWARD */
3326 pad_to_arg_alignment (initial_offset_ptr, boundary);
3327 *offset_ptr = *initial_offset_ptr;
3328 if (where_pad == downward)
3329 pad_below (offset_ptr, passed_mode, sizetree);
3330
3331 #ifdef PUSH_ROUNDING
3332 if (passed_mode != BLKmode)
3333 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3334 #endif
3335
3336 if (where_pad != none
3337 && (TREE_CODE (sizetree) != INTEGER_CST
3338 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3339 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3340
3341 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3342 #endif /* ARGS_GROW_DOWNWARD */
3343 }
3344
3345 /* Round the stack offset in *OFFSET_PTR up (or down, if ARGS_GROW_DOWNWARD)
3346 to a multiple of BOUNDARY, which is in bits but a multiple of a storage unit. */
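/* For example (illustrative): with BITS_PER_UNIT == 8, a BOUNDARY of
   64 bits is 8 bytes, so a constant offset of 12 is rounded to 16 here,
   or down to 8 when ARGS_GROW_DOWNWARD.  */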
3347
3348 static void
3349 pad_to_arg_alignment (offset_ptr, boundary)
3350 struct args_size *offset_ptr;
3351 int boundary;
3352 {
3353 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3354
3355 if (boundary > BITS_PER_UNIT)
3356 {
3357 if (offset_ptr->var)
3358 {
3359 offset_ptr->var =
3360 #ifdef ARGS_GROW_DOWNWARD
3361 round_down
3362 #else
3363 round_up
3364 #endif
3365 (ARGS_SIZE_TREE (*offset_ptr),
3366 boundary / BITS_PER_UNIT);
3367 offset_ptr->constant = 0; /*?*/
3368 }
3369 else
3370 offset_ptr->constant =
3371 #ifdef ARGS_GROW_DOWNWARD
3372 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3373 #else
3374 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3375 #endif
3376 }
3377 }
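/* Illustrative arithmetic for the two macros used above: with
   boundary == 32 and BITS_PER_UNIT == 8, boundary_in_bytes is 4, and
   for a constant offset of 5
     CEIL_ROUND (5, 4)  == (5 + 3) & ~3 == 8
     FLOOR_ROUND (5, 4) == 5 & ~3      == 4
   so the offset always moves in the direction the argument area grows:
   up normally, down when ARGS_GROW_DOWNWARD is defined.  */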
3378
3379 static void
3380 pad_below (offset_ptr, passed_mode, sizetree)
3381 struct args_size *offset_ptr;
3382 enum machine_mode passed_mode;
3383 tree sizetree;
3384 {
3385 if (passed_mode != BLKmode)
3386 {
3387 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3388 offset_ptr->constant
3389 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3390 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3391 - GET_MODE_SIZE (passed_mode));
3392 }
3393 else
3394 {
3395 if (TREE_CODE (sizetree) != INTEGER_CST
3396 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3397 {
3398 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3399 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3400 /* Add it in. */
3401 ADD_PARM_SIZE (*offset_ptr, s2);
3402 SUB_PARM_SIZE (*offset_ptr, sizetree);
3403 }
3404 }
3405 }
3406
3407 static tree
3408 round_down (value, divisor)
3409 tree value;
3410 int divisor;
3411 {
3412 return size_binop (MULT_EXPR,
3413 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3414 size_int (divisor));
3415 }
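/* For instance (illustrative), round_down on a size tree of 10 with
   divisor 4 builds (10 floor/ 4) * 4, which folds to 8; this is the
   tree analogue of the FLOOR_ROUND macro used for constant offsets.  */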
3416 \f
3417 /* Walk the tree of blocks describing the binding levels within a function
3418 and warn about uninitialized variables.
3419 This is done after calling flow_analysis and before global_alloc
3420 replaces the pseudo-regs with hard regs. */
3421
3422 void
3423 uninitialized_vars_warning (block)
3424 tree block;
3425 {
3426 register tree decl, sub;
3427 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3428 {
3429 if (TREE_CODE (decl) == VAR_DECL
3430 /* These warnings are unreliable for aggregates
3431 because assigning the fields one by one can fail to convince
3432 flow.c that the entire aggregate was initialized.
3433 Unions are troublesome because members may be shorter. */
3434 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3435 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3436 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3437 && DECL_RTL (decl) != 0
3438 && GET_CODE (DECL_RTL (decl)) == REG
3439 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3440 warning_with_decl (decl,
3441 "`%s' may be used uninitialized in this function");
3442 if (TREE_CODE (decl) == VAR_DECL
3443 && DECL_RTL (decl) != 0
3444 && GET_CODE (DECL_RTL (decl)) == REG
3445 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3446 warning_with_decl (decl,
3447 "variable `%s' may be clobbered by `longjmp'");
3448 }
3449 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3450 uninitialized_vars_warning (sub);
3451 }
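/* An illustrative case (not from this file): compiling

     int f (int c)
     {
       int x;
       if (c)
         x = 1;
       return x;
     }

   with -Wuninitialized and optimization gives "`x' may be used
   uninitialized in this function", because flow analysis finds a path
   (c == 0) on which the pseudo for `x' is read before being set.  A
   struct whose fields are assigned one by one is deliberately skipped,
   per the RECORD_TYPE test above.  */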
3452
3453 /* Do the appropriate part of uninitialized_vars_warning
3454 but for arguments instead of local variables. */
3455
3456 void
3457 setjmp_args_warning (block)
3458 tree block;
3459 {
3460 register tree decl;
3461 for (decl = DECL_ARGUMENTS (current_function_decl);
3462 decl; decl = TREE_CHAIN (decl))
3463 if (DECL_RTL (decl) != 0
3464 && GET_CODE (DECL_RTL (decl)) == REG
3465 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3466 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3467 }
3468
3469 /* If this function calls setjmp, put all vars into the stack
3470 unless they were declared `register'. */
3471
3472 void
3473 setjmp_protect (block)
3474 tree block;
3475 {
3476 register tree decl, sub;
3477 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3478 if ((TREE_CODE (decl) == VAR_DECL
3479 || TREE_CODE (decl) == PARM_DECL)
3480 && DECL_RTL (decl) != 0
3481 && GET_CODE (DECL_RTL (decl)) == REG
3482 /* If this variable came from an inline function, it must be
3483 that its life doesn't overlap the setjmp. If there was a
3484 setjmp in the function, it would already be in memory. We
3485 must exclude such variables because their DECL_RTL might be
3486 set to strange things such as virtual_stack_vars_rtx. */
3487 && ! DECL_FROM_INLINE (decl)
3488 && (
3489 #ifdef NON_SAVING_SETJMP
3490 /* If longjmp doesn't restore the registers,
3491 don't put anything in them. */
3492 NON_SAVING_SETJMP
3493 ||
3494 #endif
3495 ! DECL_REGISTER (decl)))
3496 put_var_into_stack (decl);
3497 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3498 setjmp_protect (sub);
3499 }
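/* Why this matters, sketched with hypothetical code:

     jmp_buf env;
     int f ()
     {
       int n = 0;
       if (setjmp (env))
         return n;
       n = 1;
       g ();   (g may call longjmp (env, 1))
       return n;
     }

   If `n' lived in a register, longjmp could restore that register to
   the value it held when setjmp was called, silently undoing `n = 1'.
   Forcing `n' into the stack (unless the user wrote `register') makes
   its value survive the longjmp.  */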
3500 \f
3501 /* Like the previous function, but for args instead of local variables. */
3502
3503 void
3504 setjmp_protect_args ()
3505 {
3506 register tree decl, sub;
3507 for (decl = DECL_ARGUMENTS (current_function_decl);
3508 decl; decl = TREE_CHAIN (decl))
3509 if ((TREE_CODE (decl) == VAR_DECL
3510 || TREE_CODE (decl) == PARM_DECL)
3511 && DECL_RTL (decl) != 0
3512 && GET_CODE (DECL_RTL (decl)) == REG
3513 && (
3514 /* If longjmp doesn't restore the registers,
3515 don't put anything in them. */
3516 #ifdef NON_SAVING_SETJMP
3517 NON_SAVING_SETJMP
3518 ||
3519 #endif
3520 ! DECL_REGISTER (decl)))
3521 put_var_into_stack (decl);
3522 }
3523 \f
3524 /* Return the context-pointer register corresponding to DECL,
3525 or 0 if it does not need one. */
3526
3527 rtx
3528 lookup_static_chain (decl)
3529 tree decl;
3530 {
3531 tree context = decl_function_context (decl);
3532 tree link;
3533
3534 if (context == 0)
3535 return 0;
3536
3537 /* We treat inline_function_decl as an alias for the current function
3538 because that is the inline function whose vars, types, etc.
3539 are being merged into the current function.
3540 See expand_inline_function. */
3541 if (context == current_function_decl || context == inline_function_decl)
3542 return virtual_stack_vars_rtx;
3543
3544 for (link = context_display; link; link = TREE_CHAIN (link))
3545 if (TREE_PURPOSE (link) == context)
3546 return RTL_EXPR_RTL (TREE_VALUE (link));
3547
3548 abort ();
3549 }
3550 \f
3551 /* Convert a stack slot address ADDR for variable VAR
3552 (from a containing function)
3553 into an address valid in this function (using a static chain). */
3554
3555 rtx
3556 fix_lexical_addr (addr, var)
3557 rtx addr;
3558 tree var;
3559 {
3560 rtx basereg;
3561 int displacement;
3562 tree context = decl_function_context (var);
3563 struct function *fp;
3564 rtx base = 0;
3565
3566 /* If this is the present function, we need not do anything. */
3567 if (context == current_function_decl || context == inline_function_decl)
3568 return addr;
3569
3570 for (fp = outer_function_chain; fp; fp = fp->next)
3571 if (fp->decl == context)
3572 break;
3573
3574 if (fp == 0)
3575 abort ();
3576
3577 /* Decode given address as base reg plus displacement. */
3578 if (GET_CODE (addr) == REG)
3579 basereg = addr, displacement = 0;
3580 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3581 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3582 else
3583 abort ();
3584
3585 /* We accept vars reached via the containing function's
3586 incoming arg pointer and via its stack variables pointer. */
3587 if (basereg == fp->internal_arg_pointer)
3588 {
3589 /* If reached via arg pointer, get the arg pointer value
3590 out of that function's stack frame.
3591
3592 There are two cases: If a separate ap is needed, allocate a
3593 slot in the outer function for it and dereference it that way.
3594 This is correct even if the real ap is actually a pseudo.
3595 Otherwise, just adjust the offset from the frame pointer to
3596 compensate. */
3597
3598 #ifdef NEED_SEPARATE_AP
3599 rtx addr;
3600
3601 if (fp->arg_pointer_save_area == 0)
3602 fp->arg_pointer_save_area
3603 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3604
3605 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3606 addr = memory_address (Pmode, addr);
3607
3608 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3609 #else
3610 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3611 base = lookup_static_chain (var);
3612 #endif
3613 }
3614
3615 else if (basereg == virtual_stack_vars_rtx)
3616 {
3617 /* This is the same code as lookup_static_chain, duplicated here to
3618 avoid an extra call to decl_function_context. */
3619 tree link;
3620
3621 for (link = context_display; link; link = TREE_CHAIN (link))
3622 if (TREE_PURPOSE (link) == context)
3623 {
3624 base = RTL_EXPR_RTL (TREE_VALUE (link));
3625 break;
3626 }
3627 }
3628
3629 if (base == 0)
3630 abort ();
3631
3632 /* Use same offset, relative to appropriate static chain or argument
3633 pointer. */
3634 return plus_constant (base, displacement);
3635 }
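/* Illustrative sketch: if a variable of the containing function lives
   at (plus (reg vfp) 12), the code above decodes basereg and
   displacement, finds the containing frame's base at run time through
   the static chain (or through the saved arg pointer), and returns the
   same displacement applied to that base, e.g. (plus <runtime base> 12).  */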
3636 \f
3637 /* Return the address of the trampoline for entering nested fn FUNCTION.
3638 If necessary, allocate a trampoline (in the stack frame)
3639 and emit rtl to initialize its contents (at entry to this function). */
3640
3641 rtx
3642 trampoline_address (function)
3643 tree function;
3644 {
3645 tree link;
3646 tree rtlexp;
3647 rtx tramp;
3648 struct function *fp;
3649 tree fn_context;
3650
3651 /* Find an existing trampoline and return it. */
3652 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3653 if (TREE_PURPOSE (link) == function)
3654 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3655 for (fp = outer_function_chain; fp; fp = fp->next)
3656 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3657 if (TREE_PURPOSE (link) == function)
3658 {
3659 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3660 function);
3661 return round_trampoline_addr (tramp);
3662 }
3663
3664 /* None exists; we must make one. */
3665
3666 /* Find the `struct function' for the function containing FUNCTION. */
3667 fp = 0;
3668 fn_context = decl_function_context (function);
3669 if (fn_context != current_function_decl)
3670 for (fp = outer_function_chain; fp; fp = fp->next)
3671 if (fp->decl == fn_context)
3672 break;
3673
3674 /* Allocate run-time space for this trampoline
3675 (usually in the defining function's stack frame). */
3676 #ifdef ALLOCATE_TRAMPOLINE
3677 tramp = ALLOCATE_TRAMPOLINE (fp);
3678 #else
3679 /* If rounding needed, allocate extra space
3680 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3681 #ifdef TRAMPOLINE_ALIGNMENT
3682 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3683 #else
3684 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3685 #endif
3686 if (fp != 0)
3687 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3688 else
3689 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3690 #endif
3691
3692 /* Record the trampoline for reuse and note it for later initialization
3693 by expand_function_end. */
3694 if (fp != 0)
3695 {
3696 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3697 rtlexp = make_node (RTL_EXPR);
3698 RTL_EXPR_RTL (rtlexp) = tramp;
3699 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3700 pop_obstacks ();
3701 }
3702 else
3703 {
3704 /* Make the RTL_EXPR node temporary, not momentary, so that the
3705 trampoline_list doesn't become garbage. */
3706 int momentary = suspend_momentary ();
3707 rtlexp = make_node (RTL_EXPR);
3708 resume_momentary (momentary);
3709
3710 RTL_EXPR_RTL (rtlexp) = tramp;
3711 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3712 }
3713
3714 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3715 return round_trampoline_addr (tramp);
3716 }
3717
3718 /* Given a trampoline address,
3719 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
3720
3721 static rtx
3722 round_trampoline_addr (tramp)
3723 rtx tramp;
3724 {
3725 #ifdef TRAMPOLINE_ALIGNMENT
3726 /* Round address up to desired boundary. */
3727 rtx temp = gen_reg_rtx (Pmode);
3728 temp = expand_binop (Pmode, add_optab, tramp,
3729 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3730 temp, 0, OPTAB_LIB_WIDEN);
3731 tramp = expand_binop (Pmode, and_optab, temp,
3732 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3733 temp, 0, OPTAB_LIB_WIDEN);
3734 #endif
3735 return tramp;
3736 }
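/* Illustrative arithmetic: with TRAMPOLINE_ALIGNMENT == 16, an address
   of 0x1003 becomes (0x1003 + 15) & -16 == 0x1010.  The extra
   TRAMPOLINE_ALIGNMENT - 1 bytes allocated by TRAMPOLINE_REAL_SIZE
   guarantee that TRAMPOLINE_SIZE bytes remain past the rounded address.  */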
3737 \f
3738 /* The functions identify_blocks and reorder_blocks provide a way to
3739 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3740 duplicate portions of the RTL code. Call identify_blocks before
3741 changing the RTL, and call reorder_blocks after. */
3742
3743 static int all_blocks ();
3744 static tree blocks_nreverse ();
3745
3746 /* Put all this function's BLOCK nodes into a vector, and return it.
3747 Also store in each NOTE for the beginning or end of a block
3748 the index of that block in the vector.
3749 The arguments are TOP_BLOCK, the top-level block of the function,
3750 and INSNS, the insn chain of the function. */
3751
3752 tree *
3753 identify_blocks (top_block, insns)
3754 tree top_block;
3755 rtx insns;
3756 {
3757 int n_blocks;
3758 tree *block_vector;
3759 int *block_stack;
3760 int depth = 0;
3761 int next_block_number = 0;
3762 int current_block_number = 0;
3763 rtx insn;
3764
3765 if (top_block == 0)
3766 return 0;
3767
3768 n_blocks = all_blocks (top_block, 0);
3769 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3770 block_stack = (int *) alloca (n_blocks * sizeof (int));
3771
3772 all_blocks (top_block, block_vector);
3773
3774 for (insn = insns; insn; insn = NEXT_INSN (insn))
3775 if (GET_CODE (insn) == NOTE)
3776 {
3777 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3778 {
3779 block_stack[depth++] = current_block_number;
3780 current_block_number = next_block_number;
3781 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
3782 }
3783 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3784 {
3785 current_block_number = block_stack[--depth];
3786 NOTE_BLOCK_NUMBER (insn) = current_block_number;
3787 }
3788 }
3789
3790 return block_vector;
3791 }
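/* Numbering example (illustrative): for the nesting

     { ... { ... } { ... } }

   the three BLOCK_BEG notes are numbered 0, 1, 2 in insn order, while
   each BLOCK_END note is tagged with the number of the enclosing block
   being re-entered (0 for both inner blocks here).  Only the BLOCK_BEG
   numbers are used by reorder_blocks to index BLOCK_VECTOR.  */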
3792
3793 /* Given BLOCK_VECTOR which was returned by identify_blocks,
3794 and a revised instruction chain, rebuild the tree structure
3795 of BLOCK nodes to correspond to the new order of RTL.
3796 The new block tree is inserted below TOP_BLOCK.
3797 Returns the current top-level block. */
3798
3799 tree
3800 reorder_blocks (block_vector, top_block, insns)
3801 tree *block_vector;
3802 tree top_block;
3803 rtx insns;
3804 {
3805 tree current_block = top_block;
3806 rtx insn;
3807
3808 if (block_vector == 0)
3809 return top_block;
3810
3811 /* Prune the old tree away, so that it doesn't get in the way. */
3812 BLOCK_SUBBLOCKS (current_block) = 0;
3813
3814 for (insn = insns; insn; insn = NEXT_INSN (insn))
3815 if (GET_CODE (insn) == NOTE)
3816 {
3817 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3818 {
3819 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
3820 /* If we have seen this block before, copy it. */
3821 if (TREE_ASM_WRITTEN (block))
3822 block = copy_node (block);
3823 BLOCK_SUBBLOCKS (block) = 0;
3824 TREE_ASM_WRITTEN (block) = 1;
3825 BLOCK_SUPERCONTEXT (block) = current_block;
3826 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3827 BLOCK_SUBBLOCKS (current_block) = block;
3828 current_block = block;
3829 NOTE_SOURCE_FILE (insn) = 0;
3830 }
3831 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3832 {
3833 BLOCK_SUBBLOCKS (current_block)
3834 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3835 current_block = BLOCK_SUPERCONTEXT (current_block);
3836 NOTE_SOURCE_FILE (insn) = 0;
3837 }
3838 }
3839
3840 return current_block;
3841 }
3842
3843 /* Reverse the order of elements in the chain T of blocks,
3844 and return the new head of the chain (old last element). */
3845
3846 static tree
3847 blocks_nreverse (t)
3848 tree t;
3849 {
3850 register tree prev = 0, decl, next;
3851 for (decl = t; decl; decl = next)
3852 {
3853 next = BLOCK_CHAIN (decl);
3854 BLOCK_CHAIN (decl) = prev;
3855 prev = decl;
3856 }
3857 return prev;
3858 }
3859
3860 /* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
3861 Also clear TREE_ASM_WRITTEN in all blocks. */
3862
3863 static int
3864 all_blocks (block, vector)
3865 tree block;
3866 tree *vector;
3867 {
3868 int n_blocks = 1;
3869 tree subblocks;
3870
3871 TREE_ASM_WRITTEN (block) = 0;
3872 /* Record this block. */
3873 if (vector)
3874 vector[0] = block;
3875
3876 /* Record the subblocks, and their subblocks. */
3877 for (subblocks = BLOCK_SUBBLOCKS (block);
3878 subblocks; subblocks = BLOCK_CHAIN (subblocks))
3879 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
3880
3881 return n_blocks;
3882 }
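/* Illustrative layout: for a top block T whose subblock chain is A, B
   and where A itself contains A1, the preorder walk above fills VECTOR
   as {T, A, A1, B} and returns 4.  */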
3883 \f
3884 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3885 and initialize static variables for generating RTL for the statements
3886 of the function. */
3887
3888 void
3889 init_function_start (subr, filename, line)
3890 tree subr;
3891 char *filename;
3892 int line;
3893 {
3894 char *junk;
3895
3896 init_stmt_for_function ();
3897
3898 cse_not_expected = ! optimize;
3899
3900 /* Caller save not needed yet. */
3901 caller_save_needed = 0;
3902
3903 /* No stack slots have been made yet. */
3904 stack_slot_list = 0;
3905
3906 /* There is no stack slot for handling nonlocal gotos. */
3907 nonlocal_goto_handler_slot = 0;
3908 nonlocal_goto_stack_level = 0;
3909
3910 /* No labels have been declared for nonlocal use. */
3911 nonlocal_labels = 0;
3912
3913 /* No function calls so far in this function. */
3914 function_call_count = 0;
3915
3916 /* No parm regs have been allocated.
3917 (This is important for output_inline_function.) */
3918 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3919
3920 /* Initialize the RTL mechanism. */
3921 init_emit ();
3922
3923 /* Initialize the queue of pending postincrements and postdecrements,
3924 and some other info in expr.c. */
3925 init_expr ();
3926
3927 /* We haven't done register allocation yet. */
3928 reg_renumber = 0;
3929
3930 init_const_rtx_hash_table ();
3931
3932 current_function_name = (*decl_printable_name) (subr, &junk);
3933
3934 /* Nonzero if this is a nested function that uses a static chain. */
3935
3936 current_function_needs_context
3937 = (decl_function_context (current_function_decl) != 0);
3938
3939 /* Set if a call to setjmp is seen. */
3940 current_function_calls_setjmp = 0;
3941
3942 /* Set if a call to longjmp is seen. */
3943 current_function_calls_longjmp = 0;
3944
3945 current_function_calls_alloca = 0;
3946 current_function_has_nonlocal_label = 0;
3947 current_function_contains_functions = 0;
3948
3949 current_function_returns_pcc_struct = 0;
3950 current_function_returns_struct = 0;
3951 current_function_epilogue_delay_list = 0;
3952 current_function_uses_const_pool = 0;
3953 current_function_uses_pic_offset_table = 0;
3954
3955 /* We have not yet needed to make a label to jump to for tail-recursion. */
3956 tail_recursion_label = 0;
3957
3958 /* We haven't had a need to make a save area for ap yet. */
3959
3960 arg_pointer_save_area = 0;
3961
3962 /* No stack slots allocated yet. */
3963 frame_offset = 0;
3964
3965 /* No SAVE_EXPRs in this function yet. */
3966 save_expr_regs = 0;
3967
3968 /* No RTL_EXPRs in this function yet. */
3969 rtl_expr_chain = 0;
3970
3971 /* We have not allocated any temporaries yet. */
3972 temp_slots = 0;
3973 temp_slot_level = 0;
3974
3975 /* Within function body, compute a type's size as soon as it is laid out. */
3976 immediate_size_expand++;
3977
3978 init_pending_stack_adjust ();
3979 inhibit_defer_pop = 0;
3980
3981 current_function_outgoing_args_size = 0;
3982
3983 /* Initialize the insn lengths. */
3984 init_insn_lengths ();
3985
3986 /* Prevent ever trying to delete the first instruction of a function.
3987 Also tell final how to output a linenum before the function prologue. */
3988 emit_line_note (filename, line);
3989
3990 /* Make sure first insn is a note even if we don't want linenums.
3991 This makes sure the first insn will never be deleted.
3992 Also, final expects a note to appear there. */
3993 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3994
3995 /* Set flags used by final.c. */
3996 if (aggregate_value_p (DECL_RESULT (subr)))
3997 {
3998 #ifdef PCC_STATIC_STRUCT_RETURN
3999 if (flag_pcc_struct_return)
4000 current_function_returns_pcc_struct = 1;
4001 else
4002 #endif
4003 current_function_returns_struct = 1;
4004 }
4005
4006 /* Warn if this value is an aggregate type,
4007 regardless of which calling convention we are using for it. */
4008 if (warn_aggregate_return
4009 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
4010 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
4011 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
4012 warning ("function returns an aggregate");
4013
4014 current_function_returns_pointer
4015 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
4016
4017 /* Indicate that we need to distinguish between the return value of the
4018 present function and the return value of a function being called. */
4019 rtx_equal_function_value_matters = 1;
4020
4021 /* Indicate that we have not instantiated virtual registers yet. */
4022 virtuals_instantiated = 0;
4023
4024 /* Indicate we have no need of a frame pointer yet. */
4025 frame_pointer_needed = 0;
4026
4027 /* By default assume not varargs. */
4028 current_function_varargs = 0;
4029 }
4030
4031 /* Indicate that the current function uses extra args
4032 not explicitly mentioned in the argument list in any fashion. */
4033
4034 void
4035 mark_varargs ()
4036 {
4037 current_function_varargs = 1;
4038 }
4039
4040 /* Expand a call to __main at the beginning of a possible main function. */
4041
4042 void
4043 expand_main_function ()
4044 {
4045 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
4046 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
4047 VOIDmode, 0);
4048 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
4049 }
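/* Background sketch (the usual convention, not guaranteed for every
   port): on targets without an `.init' section, `__main' is a libgcc
   routine that runs global constructors, so a compiled

     int main () { ... }

   behaves as if its body began with the call emitted above.  */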
4050 \f
4051 /* Start the RTL for a new function, and set variables used for
4052 emitting RTL.
4053 SUBR is the FUNCTION_DECL node.
4054 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4055 the function's parameters, which must be run at any return statement. */
4056
4057 void
4058 expand_function_start (subr, parms_have_cleanups)
4059 tree subr;
4060 int parms_have_cleanups;
4061 {
4062 register int i;
4063 tree tem;
4064 rtx last_ptr;
4065
4066 /* Make sure volatile mem refs aren't considered
4067 valid operands of arithmetic insns. */
4068 init_recog_no_volatile ();
4069
4070 /* If function gets a static chain arg, store it in the stack frame.
4071 Do this first, so it gets the first stack slot offset. */
4072 if (current_function_needs_context)
4073 {
4074 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4075 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4076 }
4077
4078 /* If the parameters of this function need cleaning up, get a label
4079 for the beginning of the code which executes those cleanups. This must
4080 be done before doing anything with return_label. */
4081 if (parms_have_cleanups)
4082 cleanup_label = gen_label_rtx ();
4083 else
4084 cleanup_label = 0;
4085
4086 /* Make the label for return statements to jump to, if this machine
4087 does not have a one-instruction return and uses an epilogue,
4088 or if it returns a structure, or if it has parm cleanups. */
4089 #ifdef HAVE_return
4090 if (cleanup_label == 0 && HAVE_return
4091 && ! current_function_returns_pcc_struct
4092 && ! (current_function_returns_struct && ! optimize))
4093 return_label = 0;
4094 else
4095 return_label = gen_label_rtx ();
4096 #else
4097 return_label = gen_label_rtx ();
4098 #endif
4099
4100 /* Initialize rtx used to return the value. */
4101 /* Do this before assign_parms so that we copy the struct value address
4102 before any library calls that assign parms might generate. */
4103
4104 /* Decide whether to return the value in memory or in a register. */
4105 if (aggregate_value_p (DECL_RESULT (subr)))
4106 {
4107 /* Returning something that won't go in a register. */
4108 register rtx value_address = 0;
4109
4110 #ifdef PCC_STATIC_STRUCT_RETURN
4111 if (current_function_returns_pcc_struct)
4112 {
4113 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4114 value_address = assemble_static_space (size);
4115 }
4116 else
4117 #endif
4118 {
4119 /* Expect to be passed the address of a place to store the value.
4120 If it is passed as an argument, assign_parms will take care of
4121 it. */
4122 if (struct_value_incoming_rtx)
4123 {
4124 value_address = gen_reg_rtx (Pmode);
4125 emit_move_insn (value_address, struct_value_incoming_rtx);
4126 }
4127 }
4128 if (value_address)
4129 DECL_RTL (DECL_RESULT (subr))
4130 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4131 value_address);
4132 }
4133 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4134 /* If return mode is void, this decl rtl should not be used. */
4135 DECL_RTL (DECL_RESULT (subr)) = 0;
4136 else if (parms_have_cleanups)
4137 {
4138 /* If function will end with cleanup code for parms,
4139 compute the return value into a pseudo reg,
4140 which we will copy into the true return register
4141 after the cleanups are done. */
4142
4143 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
4144 #ifdef PROMOTE_FUNCTION_RETURN
4145 tree type = TREE_TYPE (DECL_RESULT (subr));
4146 int unsignedp = TREE_UNSIGNED (type);
4147
4148 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
4149 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
4150 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
4151 || TREE_CODE (type) == OFFSET_TYPE)
4152 {
4153 PROMOTE_MODE (mode, unsignedp, type);
4154 }
4155 #endif
4156
4157 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
4158 }
4159 else
4160 /* Scalar, returned in a register. */
4161 {
4162 #ifdef FUNCTION_OUTGOING_VALUE
4163 DECL_RTL (DECL_RESULT (subr))
4164 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4165 #else
4166 DECL_RTL (DECL_RESULT (subr))
4167 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4168 #endif
4169
4170 /* Mark this reg as the function's return value. */
4171 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4172 {
4173 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4174 /* Needed because we may need to move this to memory
4175 in case it's a named return value whose address is taken. */
4176 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4177 }
4178 }
4179
4180 /* Initialize rtx for parameters and local variables.
4181 In some cases this requires emitting insns. */
4182
4183 assign_parms (subr, 0);
4184
4185 /* The following was moved from init_function_start.
4186 The move is supposed to make sdb output more accurate. */
4187 /* Indicate the beginning of the function body,
4188 as opposed to parm setup. */
4189 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4190
4191 /* If doing stupid allocation, mark parms as born here. */
4192
4193 if (GET_CODE (get_last_insn ()) != NOTE)
4194 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4195 parm_birth_insn = get_last_insn ();
4196
4197 if (obey_regdecls)
4198 {
4199 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4200 use_variable (regno_reg_rtx[i]);
4201
4202 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4203 use_variable (current_function_internal_arg_pointer);
4204 }
4205
4206 /* Fetch static chain values for containing functions. */
4207 tem = decl_function_context (current_function_decl);
4208 /* If not doing stupid register allocation, then start off with the static
4209 chain pointer in a pseudo register. Otherwise, we use the stack
4210 address that was generated above. */
4211 if (tem && ! obey_regdecls)
4212 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4213 context_display = 0;
4214 while (tem)
4215 {
4216 tree rtlexp = make_node (RTL_EXPR);
4217
4218 RTL_EXPR_RTL (rtlexp) = last_ptr;
4219 context_display = tree_cons (tem, rtlexp, context_display);
4220 tem = decl_function_context (tem);
4221 if (tem == 0)
4222 break;
4223 /* Chain thru stack frames, assuming pointer to next lexical frame
4224 is found at the place we always store it. */
4225 #ifdef FRAME_GROWS_DOWNWARD
4226 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4227 #endif
4228 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4229 memory_address (Pmode, last_ptr)));
4230 }
4231
4232 /* After the display initializations is where the tail-recursion label
4233 should go, if we end up needing one. Ensure we have a NOTE here
4234 since some things (like trampolines) get placed before this. */
4235 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4236
4237 /* Evaluate now the sizes of any types declared among the arguments. */
4238 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4239 expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
4240
4241 /* Make sure there is a line number after the function entry setup code. */
4242 force_next_line_note ();
4243 }
4244 \f
4245 /* Generate RTL for the end of the current function.
4246 FILENAME and LINE are the current position in the source file. */
4247
4248 /* It is up to language-specific callers to do cleanups for parameters. */
4249
4250 void
4251 expand_function_end (filename, line)
4252 char *filename;
4253 int line;
4254 {
4255 register int i;
4256 tree link;
4257
4258 static rtx initial_trampoline;
4259
4260 #ifdef NON_SAVING_SETJMP
4261 /* Don't put any variables in registers if we call setjmp
4262 on a machine that fails to restore the registers. */
4263 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4264 {
4265 setjmp_protect (DECL_INITIAL (current_function_decl));
4266 setjmp_protect_args ();
4267 }
4268 #endif
4269
4270 /* Save the argument pointer if a save area was made for it. */
4271 if (arg_pointer_save_area)
4272 {
4273 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4274 emit_insn_before (x, tail_recursion_reentry);
4275 }
4276
4277 /* Initialize any trampolines required by this function. */
4278 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4279 {
4280 tree function = TREE_PURPOSE (link);
4281 rtx context = lookup_static_chain (function);
4282 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4283 rtx seq;
4284
4285 /* First make sure this compilation has a template for
4286 initializing trampolines. */
4287 if (initial_trampoline == 0)
4288 {
4289 end_temporary_allocation ();
4290 initial_trampoline
4291 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4292 resume_temporary_allocation ();
4293 }
4294
4295 /* Generate insns to initialize the trampoline. */
4296 start_sequence ();
4297 tramp = change_address (initial_trampoline, BLKmode,
4298 round_trampoline_addr (XEXP (tramp, 0)));
4299 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4300 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4301 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4302 XEXP (DECL_RTL (function), 0), context);
4303 seq = get_insns ();
4304 end_sequence ();
4305
4306 /* Put those insns at entry to the containing function (this one). */
4307 emit_insns_before (seq, tail_recursion_reentry);
4308 }
4309 /* Clear the trampoline_list for the next function. */
4310 trampoline_list = 0;
4311
4312 #if 0 /* I think unused parms are legitimate enough. */
4313 /* Warn about unused parms. */
4314 if (warn_unused)
4315 {
4316 rtx decl;
4317
4318 for (decl = DECL_ARGUMENTS (current_function_decl);
4319 decl; decl = TREE_CHAIN (decl))
4320 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4321 warning_with_decl (decl, "unused parameter `%s'");
4322 }
4323 #endif
4324
4325 /* Delete handlers for nonlocal gotos if nothing uses them. */
4326 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4327 delete_handlers ();
4328
4329 /* End any sequences that failed to be closed due to syntax errors. */
4330 while (in_sequence_p ())
4331 end_sequence ();
4332
4333 /* Outside function body, can't compute type's actual size
4334 until next function's body starts. */
4335 immediate_size_expand--;
4336
4337 /* If doing stupid register allocation,
4338 mark register parms as dying here. */
4339
4340 if (obey_regdecls)
4341 {
4342 rtx tem;
4343 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4344 use_variable (regno_reg_rtx[i]);
4345
4346 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4347
4348 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4349 {
4350 use_variable (XEXP (tem, 0));
4351 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4352 }
4353
4354 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4355 use_variable (current_function_internal_arg_pointer);
4356 }
4357
4358 clear_pending_stack_adjust ();
4359 do_pending_stack_adjust ();
4360
4361 /* Mark the end of the function body.
4362 If control reaches this insn, the function can drop through
4363 without returning a value. */
4364 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4365
4366 /* Output a linenumber for the end of the function.
4367 SDB depends on this. */
4368 emit_line_note_force (filename, line);
4369
4370 /* Output the label for the actual return from the function,
4371 if one is expected. This happens either because a function epilogue
4372 is used instead of a return instruction, or because a return was done
4373 with a goto in order to run local cleanups, or because of pcc-style
4374 structure returning. */
4375
4376 if (return_label)
4377 emit_label (return_label);
4378
4379 /* If we had calls to alloca, and this machine needs
4380 an accurate stack pointer to exit the function,
4381 insert some code to save and restore the stack pointer. */
4382 #ifdef EXIT_IGNORE_STACK
4383 if (! EXIT_IGNORE_STACK)
4384 #endif
4385 if (current_function_calls_alloca)
4386 {
4387 rtx tem = 0;
4388
4389 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4390 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4391 }
4392
4393 /* If scalar return value was computed in a pseudo-reg,
4394 copy that to the hard return register. */
4395 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4396 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4397 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4398 >= FIRST_PSEUDO_REGISTER))
4399 {
4400 rtx real_decl_result;
4401
4402 #ifdef FUNCTION_OUTGOING_VALUE
4403 real_decl_result
4404 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4405 current_function_decl);
4406 #else
4407 real_decl_result
4408 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4409 current_function_decl);
4410 #endif
4411 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4412 emit_move_insn (real_decl_result,
4413 DECL_RTL (DECL_RESULT (current_function_decl)));
4414 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4415 }
4416
4417 /* If returning a structure, arrange to return the address of the value
4418 in a place where debuggers expect to find it.
4419
4420 If returning a structure PCC style,
4421 the caller also depends on this value.
4422 And current_function_returns_pcc_struct is not necessarily set. */
4423 if (current_function_returns_struct
4424 || current_function_returns_pcc_struct)
4425 {
4426 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4427 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4428 #ifdef FUNCTION_OUTGOING_VALUE
4429 rtx outgoing
4430 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4431 current_function_decl);
4432 #else
4433 rtx outgoing
4434 = FUNCTION_VALUE (build_pointer_type (type),
4435 current_function_decl);
4436 #endif
4437
4438 /* Mark this as a function return value so integrate will delete the
4439 assignment and USE below when inlining this function. */
4440 REG_FUNCTION_VALUE_P (outgoing) = 1;
4441
4442 emit_move_insn (outgoing, value_address);
4443 use_variable (outgoing);
4444 }
4445
4446 /* Output a return insn if we are using one.
4447 Otherwise, let the rtl chain end here, to drop through
4448 into the epilogue. */
4449
4450 #ifdef HAVE_return
4451 if (HAVE_return)
4452 {
4453 emit_jump_insn (gen_return ());
4454 emit_barrier ();
4455 }
4456 #endif
4457
4458 /* Fix up any gotos that jumped out to the outermost
4459 binding level of the function.
4460 Must follow emitting RETURN_LABEL. */
4461
4462 /* If you have any cleanups to do at this point,
4463 and they need to create temporary variables,
4464 then you will lose. */
4465 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4466 }
4467 \f
4468 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4469
4470 static int *prologue;
4471 static int *epilogue;
4472
4473 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
4474 or a single insn). */
4475
4476 static int *
4477 record_insns (insns)
4478 rtx insns;
4479 {
4480 int *vec;
4481
4482 if (GET_CODE (insns) == SEQUENCE)
4483 {
4484 int len = XVECLEN (insns, 0);
4485 vec = (int *) oballoc ((len + 1) * sizeof (int));
4486 vec[len] = 0;
4487 while (--len >= 0)
4488 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4489 }
4490 else
4491 {
4492 vec = (int *) oballoc (2 * sizeof (int));
4493 vec[0] = INSN_UID (insns);
4494 vec[1] = 0;
4495 }
4496 return vec;
4497 }
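/* For example (illustrative): a SEQUENCE of three insns with UIDs 10,
   11 and 12 yields the vector {10, 11, 12, 0}; a lone insn with UID 7
   yields {7, 0}.  The trailing zero terminates the scans in `contains'
   and in reposition_prologue_and_epilogue_notes.  */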
4498
4499 /* Determine how many INSN_UIDs in VEC are part of INSN. */
4500
4501 static int
4502 contains (insn, vec)
4503 rtx insn;
4504 int *vec;
4505 {
4506 register int i, j;
4507
4508 if (GET_CODE (insn) == INSN
4509 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4510 {
4511 int count = 0;
4512 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4513 for (j = 0; vec[j]; j++)
4514 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4515 count++;
4516 return count;
4517 }
4518 else
4519 {
4520 for (j = 0; vec[j]; j++)
4521 if (INSN_UID (insn) == vec[j])
4522 return 1;
4523 }
4524 return 0;
4525 }
4526
4527 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4528 this into place with notes indicating where the prologue ends and where
4529 the epilogue begins. Update the basic block information when possible. */
4530
4531 void
4532 thread_prologue_and_epilogue_insns (f)
4533 rtx f;
4534 {
4535 #ifdef HAVE_prologue
4536 if (HAVE_prologue)
4537 {
4538 rtx head, seq, insn;
4539
4540 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4541 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4542 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4543 seq = gen_prologue ();
4544 head = emit_insn_after (seq, f);
4545
4546 /* Include the new prologue insns in the first block. Ignore them
4547 if they form a basic block unto themselves. */
4548 if (basic_block_head && n_basic_blocks
4549 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4550 basic_block_head[0] = NEXT_INSN (f);
4551
4552 /* Retain a map of the prologue insns. */
4553 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4554 }
4555 else
4556 #endif
4557 prologue = 0;
4558
4559 #ifdef HAVE_epilogue
4560 if (HAVE_epilogue)
4561 {
4562 rtx insn = get_last_insn ();
4563 rtx prev = prev_nonnote_insn (insn);
4564
4565 /* If we end with a BARRIER, we don't need an epilogue. */
4566 if (! (prev && GET_CODE (prev) == BARRIER))
4567 {
4568 rtx tail, seq;
4569
4570 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4571 the epilogue insns (this must include the jump insn that
4572 returns), USE insns at the end of a function, and a BARRIER. */
4573
4574 emit_barrier_after (insn);
4575
4576 /* Place the epilogue before the USE insns at the end of a
4577 function. */
4578 while (prev
4579 && GET_CODE (prev) == INSN
4580 && GET_CODE (PATTERN (prev)) == USE)
4581 {
4582 insn = PREV_INSN (prev);
4583 prev = prev_nonnote_insn (prev);
4584 }
4585
4586 seq = gen_epilogue ();
4587 tail = emit_jump_insn_after (seq, insn);
4588 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4589
4590 /* Include the new epilogue insns in the last block. Ignore
4591 them if they form a basic block unto themselves. */
4592 if (basic_block_end && n_basic_blocks
4593 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4594 basic_block_end[n_basic_blocks - 1] = tail;
4595
4596 /* Retain a map of the epilogue insns. */
4597 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4598 return;
4599 }
4600 }
4601 #endif
4602 epilogue = 0;
4603 }
4604
4605 /* Reposition the prologue-end and epilogue-begin notes after instruction
4606 scheduling and delayed branch scheduling. */
4607
4608 void
4609 reposition_prologue_and_epilogue_notes (f)
4610 rtx f;
4611 {
4612 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
4613 /* Reposition the prologue and epilogue notes. */
4614 if (n_basic_blocks)
4615 {
4616 rtx next, prev;
4617 int len;
4618
4619 if (prologue)
4620 {
4621 register rtx insn, note = 0;
4622
4623 /* Scan from the beginning until we reach the last prologue insn.
4624 We apparently can't depend on basic_block_{head,end} after
4625 reorg has run. */
4626 for (len = 0; prologue[len]; len++)
4627 ;
4628 for (insn = f; insn; insn = NEXT_INSN (insn))
4629 if (GET_CODE (insn) == NOTE)
4630 {
4631 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4632 note = insn;
4633 }
4634 else if ((len -= contains (insn, prologue)) == 0)
4635 {
4636 /* Find the prologue-end note if we haven't already, and
4637 move it to just after the last prologue insn. */
4638 if (note == 0)
4639 for (note = insn; note = NEXT_INSN (note);)
4640 if (GET_CODE (note) == NOTE
4641 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4642 break;
4643 next = NEXT_INSN (note);
4644 prev = PREV_INSN (note);
4645 if (prev)
4646 NEXT_INSN (prev) = next;
4647 if (next)
4648 PREV_INSN (next) = prev;
4649 add_insn_after (note, insn);
4650 break;
4651 }
4652 }
4653
4654 if (epilogue)
4655 {
4656 register rtx insn, note = 0;
4657
4658 /* Scan from the end until we reach the first epilogue insn.
4659 We apparently can't depend on basic_block_{head,end} after
4660 reorg has run. */
4661 for (len = 0; epilogue[len]; len++)
4662 ;
4663 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
4664 if (GET_CODE (insn) == NOTE)
4665 {
4666 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4667 note = insn;
4668 }
4669 else if ((len -= contains (insn, epilogue)) == 0)
4670 {
4671 /* Find the epilogue-begin note if we haven't already, and
4672 move it to just before the first epilogue insn. */
4673 if (note == 0)
4674 for (note = insn; note = PREV_INSN (note);)
4675 if (GET_CODE (note) == NOTE
4676 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4677 break;
4678 next = NEXT_INSN (note);
4679 prev = PREV_INSN (note);
4680 if (prev)
4681 NEXT_INSN (prev) = next;
4682 if (next)
4683 PREV_INSN (next) = prev;
4684 add_insn_after (note, PREV_INSN (insn));
4685 break;
4686 }
4687 }
4688 }
4689 #endif /* HAVE_prologue or HAVE_epilogue */
4690 }