/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
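
/* Rough usage sketch (hypothetical front-end code, for illustration only):
   on discovering `&x' after `x' was already given a pseudo-register, a
   front end would do something like

       TREE_ADDRESSABLE (decl) = 1;
       if (DECL_RTL (decl) != 0)
         put_var_into_stack (decl);

   so that the insns already emitted for `x' are rewritten in place.  */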

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than VALUE.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
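
/* Worked examples (values chosen purely for illustration; both macros
   assume ALIGN is a power of two):

     FLOOR_ROUND (13, 8)  == 8     CEIL_ROUND (13, 8) == 16
     FLOOR_ROUND (-13, 8) == -16   CEIL_ROUND (16, 8) == 16

   The masking form rounds toward minus infinity even for negative values,
   which plain `/' followed by `*' would not guarantee.  */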

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static void instantiate_decl ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to STACK_BOUNDARY here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
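
/* Illustrative calls (a sketch; the modes and sizes are arbitrary
   examples, not taken from any caller):

     rtx word_slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     rtx big_slot  = assign_stack_local (BLKmode, 32, -1);

   The first gets an SImode slot aligned according to the mode; the second
   gets 32 bytes rounded up to and aligned on BIGGEST_ALIGNMENT.  */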

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
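
/* A sketch of the usual pattern (hypothetical caller; `type' stands for
   some aggregate tree type): allocate scratch memory for a statement,
   then let the end-of-statement cleanup reclaim it:

     rtx temp = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);
     ... store into and read from temp ...
     free_temp_slots ();

   With KEEP nonzero the slot would instead survive free_temp_slots.  */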
\f
/* If X could be a reference to a temporary slot, mark that slot as belonging
   to the level one higher.  If X matched one of our slots, just mark that
   one.  Otherwise, we can't easily predict which it is, so upgrade all of
   them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)
      {
        p->level--;
        return;
      }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->in_use = 0;
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  /* For GNU C++, we must allow a sequence to be emitted anywhere in
     the level where the sequence was started.  By not changing levels
     when the compiler is inside a sequence, the temporaries for the
     sequence will not unwittingly conflict with the temporaries for
     other sequences and/or code at that level.  */
  if (in_sequence_p ())
    return;

  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  /* See comment in push_temp_slots about why we don't change levels
     in sequences.  */
  if (in_sequence_p ())
    return;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level)
      p->in_use = 0;

  temp_slot_level--;
}
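
/* Nesting sketch (hypothetical): a ({...}) grouping pushes a level so its
   temporaries outlive the inner statements; the result slot is kept alive
   by promoting it one level before popping:

     push_temp_slots ();
     ... expand the grouped statements; result lands in rtx RESULT ...
     preserve_temp_slots (RESULT);
     pop_temp_slots ();

   Everything else allocated inside the group becomes reusable here.  */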
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);
      temp
        = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl));
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl)));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Scan all the function's insns so far for references to VAR
     and fix them up.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* REPLACEMENTS is a pointer to a list of the above structures and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          if (toplevel
              && GET_CODE (PATTERN (insn)) == SET
              && SET_DEST (PATTERN (insn)) == var
              && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      /* We cannot separate USE insns from the CALL_INSN
                         that they belong to.  If this is a CALL_INSN, insert
                         the move insn before the USE insns preceding it
                         instead of immediately before the insn.  */
                      if (GET_CODE (insn) == CALL_INSN)
                        {
                          insert_before = insn;
                          while (GET_CODE (PREV_INSN (insert_before)) == INSN
                                 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
                            insert_before = PREV_INSN (insert_before);
                        }
                      else
                        insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_fixup_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register or we changed the mode,
             we can leave things the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && GET_MODE (x) == promoted_mode
              && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (promoted_mode);
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_fixup_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            tem = fixup_memory_subreg (tem, insn, 1);
          tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  int offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                  offset = (GET_MODE_SIZE (is_mode)
                            - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                  pos %= GET_MODE_BITSIZE (wanted_mode);

                  newmem = gen_rtx (MEM, wanted_mode,
                                    plus_constant (XEXP (tem, 0), offset));
                  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                  /* Make the change and see if the insn remains valid.  */
                  INSN_CODE (insn) = -1;
                  XEXP (x, 0) = newmem;
                  XEXP (x, 2) = GEN_INT (pos);

                  if (recog_memoized (insn) >= 0)
                    return;

                  /* Otherwise, restore old position.  XEXP (x, 0) will be
                     restored later.  */
                  XEXP (x, 2) = old_pos;
                }
            }

          /* If we get here, the bitfield extract insn can't accept a memory
             reference.  Copy the input into a register.  */

          tem1 = gen_reg_rtx (GET_MODE (tem));
          emit_insn_before (gen_move_insn (tem1, tem), insn);
          XEXP (x, 0) = tem1;
          return;
        }
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
        {
          /* If this is a special SUBREG made because VAR was promoted
             from a wider mode, replace it with VAR and call ourself
             recursively, this time saying that the object previously
             had its current mode (by virtue of the SUBREG).  */

          if (SUBREG_PROMOTED_VAR_P (x))
            {
              *loc = var;
              fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
              return;
            }

          /* If this SUBREG makes VAR wider, it has become a paradoxical
             SUBREG with VAR in memory, but these aren't allowed at this
             stage of the compilation.  So load VAR into a pseudo and take
             a SUBREG of that pseudo.  */
          if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
            {
              replacement = find_fixup_replacement (replacements, var);
              if (replacement->new == 0)
                replacement->new = gen_reg_rtx (GET_MODE (var));
              SUBREG_REG (x) = replacement->new;
              return;
            }

          /* See if we have already found a replacement for this SUBREG.
             If so, use it.  Otherwise, make a MEM and see if the insn
             is recognized.  If not, or if we should force MEM into a register,
             make a pseudo for this SUBREG.  */
          replacement = find_fixup_replacement (replacements, x);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
          || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
          || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
        optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
         insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
          && SUBREG_REG (SET_DEST (x)) == var
          && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
              > GET_MODE_SIZE (GET_MODE (var))))
        {
          SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
          emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
                                                            tem)),
                           insn);
          break;
        }

      {
        rtx dest = SET_DEST (x);
        rtx src = SET_SRC (x);
        rtx outerdest = dest;

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == SIGN_EXTRACT
               || GET_CODE (dest) == ZERO_EXTRACT)
          dest = XEXP (dest, 0);

        if (GET_CODE (src) == SUBREG)
          src = XEXP (src, 0);

        /* If VAR does not appear at the top level of the SET
           just scan the lower levels of the tree.  */

        if (src != var && dest != var)
          break;

        /* We will need to rerecognize this insn.  */
        INSN_CODE (insn) = -1;

#ifdef HAVE_insv
        if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
          {
            /* Since this case will return, ensure we fixup all the
               operands here.  */
            fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
                              insn, replacements);
            fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
                              insn, replacements);
            fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
                              insn, replacements);

            tem = XEXP (outerdest, 0);

            /* Clean up (SUBREG:SI (MEM:mode ...) 0)
               that may appear inside a ZERO_EXTRACT.
               This was legitimate when the MEM was a REG.  */
            if (GET_CODE (tem) == SUBREG
                && SUBREG_REG (tem) == var)
              tem = fixup_memory_subreg (tem, insn, 1);
            else
              tem = fixup_stack_1 (tem, insn);

            if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
                && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
                && ! mode_dependent_address_p (XEXP (tem, 0))
                && ! MEM_VOLATILE_P (tem))
              {
                enum machine_mode wanted_mode
                  = insn_operand_mode[(int) CODE_FOR_insv][0];
                enum machine_mode is_mode = GET_MODE (tem);
                int width = INTVAL (XEXP (outerdest, 1));
                int pos = INTVAL (XEXP (outerdest, 2));

                /* If we have a narrower mode, we can do something.  */
                if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                  {
                    int offset = pos / BITS_PER_UNIT;
                    rtx old_pos = XEXP (outerdest, 2);
                    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
                    offset = (GET_MODE_SIZE (is_mode)
                              - GET_MODE_SIZE (wanted_mode) - offset);
#endif

                    pos %= GET_MODE_BITSIZE (wanted_mode);

                    newmem = gen_rtx (MEM, wanted_mode,
                                      plus_constant (XEXP (tem, 0), offset));
                    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                    /* Make the change and see if the insn remains valid.  */
                    INSN_CODE (insn) = -1;
                    XEXP (outerdest, 0) = newmem;
                    XEXP (outerdest, 2) = GEN_INT (pos);

                    if (recog_memoized (insn) >= 0)
                      return;

                    /* Otherwise, restore old position.  XEXP (x, 0) will be
                       restored later.  */
                    XEXP (outerdest, 2) = old_pos;
                  }
              }

            /* If we get here, the bit-field store doesn't allow memory
               or isn't located at a constant position.  Load the value into
               a register, do the store, and put it back into memory.  */

            tem1 = gen_reg_rtx (GET_MODE (tem));
            emit_insn_before (gen_move_insn (tem1, tem), insn);
            emit_insn_after (gen_move_insn (tem, tem1), insn);
            XEXP (outerdest, 0) = tem1;
            return;
          }
#endif

        /* STRICT_LOW_PART is a no-op on memory references
           and it can cause combinations to be unrecognizable,
           so eliminate it.  */

        if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
          SET_DEST (x) = XEXP (SET_DEST (x), 0);

        /* A valid insn to copy VAR into or out of a register
           must be left alone, to avoid an infinite loop here.
           If the reference to VAR is by a subreg, fix that up,
           since SUBREG is not valid for a memref.
           Also fix up the address of the stack slot.

           Note that we must not try to recognize the insn until
           after we know that we have valid addresses and no
           (subreg (mem ...) ...) constructs, since these interfere
           with determining the validity of the insn.  */

        if ((SET_SRC (x) == var
             || (GET_CODE (SET_SRC (x)) == SUBREG
                 && SUBREG_REG (SET_SRC (x)) == var))
            && (GET_CODE (SET_DEST (x)) == REG
                || (GET_CODE (SET_DEST (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
            && x == single_set (PATTERN (insn)))
          {
            rtx pat;

            replacement = find_fixup_replacement (replacements, SET_SRC (x));
            if (replacement->new)
              SET_SRC (x) = replacement->new;
            else if (GET_CODE (SET_SRC (x)) == SUBREG)
              SET_SRC (x) = replacement->new
                = fixup_memory_subreg (SET_SRC (x), insn, 0);
            else
              SET_SRC (x) = replacement->new
                = fixup_stack_1 (SET_SRC (x), insn);

            if (recog_memoized (insn) >= 0)
              return;

            /* INSN is not valid, but we know that we want to
               copy SET_SRC (x) to SET_DEST (x) in some way.  So
               we generate the move and see whether it requires more
               than one insn.  If it does, we emit those insns and
               delete INSN.  Otherwise, we can just replace the pattern
               of INSN; we have already verified above that INSN has
               no other function than to do X.  */

            pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
            if (GET_CODE (pat) == SEQUENCE)
              {
                emit_insn_after (pat, insn);
                PUT_CODE (insn, NOTE);
                NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (insn) = 0;
              }
            else
              PATTERN (insn) = pat;

            return;
          }

        if ((SET_DEST (x) == var
             || (GET_CODE (SET_DEST (x)) == SUBREG
                 && SUBREG_REG (SET_DEST (x)) == var))
            && (GET_CODE (SET_SRC (x)) == REG
                || (GET_CODE (SET_SRC (x)) == SUBREG
                    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
            && x == single_set (PATTERN (insn)))
          {
            rtx pat;

            if (GET_CODE (SET_DEST (x)) == SUBREG)
              SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
            else
              SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

            if (recog_memoized (insn) >= 0)
              return;

            pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
            if (GET_CODE (pat) == SEQUENCE)
              {
                emit_insn_after (pat, insn);
                PUT_CODE (insn, NOTE);
                NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                NOTE_SOURCE_FILE (insn) = 0;
              }
            else
              PATTERN (insn) = pat;

            return;
          }

        /* Otherwise, storing into VAR must be handled specially
           by storing into a temporary and copying that into VAR
           with a new insn after this one.  Note that this case
           will be used when storing into a promoted scalar since
           the insn will now have different modes on the input
           and output and hence will be invalid (except for the case
           of setting it to a constant, which does not need any
           change if it is valid).  We generate extra code in that case,
           but combine.c will eliminate it.  */

        if (dest == var)
          {
            rtx temp;
            rtx fixeddest = SET_DEST (x);

            /* STRICT_LOW_PART can be discarded, around a MEM.  */
            if (GET_CODE (fixeddest) == STRICT_LOW_PART)
              fixeddest = XEXP (fixeddest, 0);
            /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
            if (GET_CODE (fixeddest) == SUBREG)
              fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
            else
              fixeddest = fixup_stack_1 (fixeddest, insn);

            temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
                                ? GET_MODE (fixeddest)
                                : GET_MODE (SET_SRC (x)));

            emit_insn_after (gen_move_insn (fixeddest,
                                            gen_lowpart (GET_MODE (fixeddest),
                                                         temp)),
                             insn);

            SET_DEST (x) = temp;
          }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
                              insn, replacements);
        }
    }
}
\f
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

#if BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
             - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
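
/* For instance (an illustrative transformation, assuming a little-endian
   target with UNITS_PER_WORD == 4):

     (subreg:QI (mem:SI addr) 0)  becomes  (mem:QI addr)
     (subreg:SI (mem:DI addr) 1)  becomes  (mem:SI (plus addr 4))

   On a big-endian target the offset is adjusted as above.  */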

/* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
   Replace subexpressions of X in place.
   If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
   Otherwise return X, with its contents possibly altered.

   If any insns must be emitted to compute NEWADDR, put them before INSN.  */

static rtx
walk_fixup_memory_subreg (x, insn)
     register rtx x;
     rtx insn;
{
  register enum rtx_code code;
  register char *fmt;
  register int i;

  if (x == 0)
    return 0;

  code = GET_CODE (x);

  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    return fixup_memory_subreg (x, insn, 0);

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j)
              = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
        }
    }
  return x;
}
1683 \f
1684 #if 0
1685 /* Fix up any references to stack slots that are invalid memory addresses
1686 because they exceed the maximum range of a displacement. */
1687
1688 void
1689 fixup_stack_slots ()
1690 {
1691 register rtx insn;
1692
1693 /* Did we generate a stack slot that is out of range
1694 or otherwise has an invalid address? */
1695 if (invalid_stack_slot)
1696 {
1697 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
1698 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1699 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1700 || GET_CODE (insn) == JUMP_INSN)
1701 fixup_stack_1 (PATTERN (insn), insn);
1702 }
1703 }
1704 #endif
1705
1706 /* For each memory ref within X, if it refers to a stack slot
1707 with an out of range displacement, put the address in a temp register
1708 (emitting new insns before INSN to load these registers)
1709 and alter the memory ref to use that register.
1710 Replace each such MEM rtx with a copy, to avoid clobberage. */
1711
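/* For example, on a hypothetical machine whose displacements are
   limited to +-32K, the reference

     (mem:SI (plus (reg virtual-stack-vars) (const_int 70000)))

   cannot be used as written; the code below uses copy_to_reg to emit
   something like

     (set (reg T) (plus (reg virtual-stack-vars) (const_int 70000)))

   before INSN, where T is a fresh pseudo, and then rewrites the
   reference as (mem:SI (reg T)).  */
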
1712 static rtx
1713 fixup_stack_1 (x, insn)
1714 rtx x;
1715 rtx insn;
1716 {
1717 register int i;
1718 register RTX_CODE code = GET_CODE (x);
1719 register char *fmt;
1720
1721 if (code == MEM)
1722 {
1723 register rtx ad = XEXP (x, 0);
1724 /* If we have the address of a stack slot but it's not valid
1725 (displacement is too large), compute the sum in a register. */
1726 if (GET_CODE (ad) == PLUS
1727 && GET_CODE (XEXP (ad, 0)) == REG
1728 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1729 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1730 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1731 {
1732 rtx temp, seq;
1733 if (memory_address_p (GET_MODE (x), ad))
1734 return x;
1735
1736 start_sequence ();
1737 temp = copy_to_reg (ad);
1738 seq = gen_sequence ();
1739 end_sequence ();
1740 emit_insn_before (seq, insn);
1741 return change_address (x, VOIDmode, temp);
1742 }
1743 return x;
1744 }
1745
1746 fmt = GET_RTX_FORMAT (code);
1747 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1748 {
1749 if (fmt[i] == 'e')
1750 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1751 if (fmt[i] == 'E')
1752 {
1753 register int j;
1754 for (j = 0; j < XVECLEN (x, i); j++)
1755 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1756 }
1757 }
1758 return x;
1759 }
1760 \f
1761 /* Optimization: a bit-field instruction whose field
1762 happens to be a byte or halfword in memory
1763 can be changed to a move instruction.
1764
1765 We call here when INSN is an insn to examine or store into a bit-field.
1766 BODY is the SET-rtx to be altered.
1767
1768 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1769 (Currently this is called only from function.c, and EQUIV_MEM
1770 is always 0.) */
1771
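/* As a sketch of the transformation (little-endian target assumed),
   a store such as

     (set (zero_extract:SI (mem:SI (reg A)) (const_int 8) (const_int 8))
          (reg:SI B))

   names an 8-bit field that occupies a whole byte, so it can become
   the ordinary QImode move

     (set (mem:QI (plus (reg A) (const_int 1))) (subreg:QI (reg:SI B) 0))

   provided the address is not mode-dependent and the MEM is not
   volatile, as checked below.  */
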
1772 static void
1773 optimize_bit_field (body, insn, equiv_mem)
1774 rtx body;
1775 rtx insn;
1776 rtx *equiv_mem;
1777 {
1778 register rtx bitfield;
1779 int destflag;
1780 rtx seq = 0;
1781 enum machine_mode mode;
1782
1783 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1784 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1785 bitfield = SET_DEST (body), destflag = 1;
1786 else
1787 bitfield = SET_SRC (body), destflag = 0;
1788
1789 /* First check that the field being stored has constant size and position
1790 and is in fact a byte or halfword suitably aligned. */
1791
1792 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1793 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1794 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1795 != BLKmode)
1796 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1797 {
1798 register rtx memref = 0;
1799
1800 /* Now check that the containing word is memory, not a register,
1801 and that it is safe to change the machine mode. */
1802
1803 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1804 memref = XEXP (bitfield, 0);
1805 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1806 && equiv_mem != 0)
1807 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1808 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1809 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1810 memref = SUBREG_REG (XEXP (bitfield, 0));
1811 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1812 && equiv_mem != 0
1813 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1814 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1815
1816 if (memref
1817 && ! mode_dependent_address_p (XEXP (memref, 0))
1818 && ! MEM_VOLATILE_P (memref))
1819 {
1820 /* Now adjust the address, first for any subreg'ing
1821 that we are now getting rid of,
1822 and then for which byte of the word is wanted. */
1823
1824 register int offset = INTVAL (XEXP (bitfield, 2));
1825 /* Adjust OFFSET to count bits from low-address byte. */
1826 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1827 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1828 - offset - INTVAL (XEXP (bitfield, 1)));
1829 #endif
1830 /* Adjust OFFSET to count bytes from low-address byte. */
1831 offset /= BITS_PER_UNIT;
1832 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1833 {
1834 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1835 #if BYTES_BIG_ENDIAN
1836 offset -= (MIN (UNITS_PER_WORD,
1837 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1838 - MIN (UNITS_PER_WORD,
1839 GET_MODE_SIZE (GET_MODE (memref))));
1840 #endif
1841 }
1842
1843 memref = change_address (memref, mode,
1844 plus_constant (XEXP (memref, 0), offset));
1845
1846 /* Store this memory reference where
1847 we found the bit field reference. */
1848
1849 if (destflag)
1850 {
1851 validate_change (insn, &SET_DEST (body), memref, 1);
1852 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1853 {
1854 rtx src = SET_SRC (body);
1855 while (GET_CODE (src) == SUBREG
1856 && SUBREG_WORD (src) == 0)
1857 src = SUBREG_REG (src);
1858 if (GET_MODE (src) != GET_MODE (memref))
1859 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1860 validate_change (insn, &SET_SRC (body), src, 1);
1861 }
1862 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1863 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1864 /* This shouldn't happen because anything that didn't have
1865 one of these modes should have got converted explicitly
1866 and then referenced through a subreg.
1867 This is so because the original bit-field was
1868 handled by agg_mode and so its tree structure had
1869 the same mode that memref now has. */
1870 abort ();
1871 }
1872 else
1873 {
1874 rtx dest = SET_DEST (body);
1875
1876 while (GET_CODE (dest) == SUBREG
1877 && SUBREG_WORD (dest) == 0)
1878 dest = SUBREG_REG (dest);
1879
1880 validate_change (insn, &SET_DEST (body), dest, 1);
1881
1882 if (GET_MODE (dest) == GET_MODE (memref))
1883 validate_change (insn, &SET_SRC (body), memref, 1);
1884 else
1885 {
1886 /* Convert the mem ref to the destination mode. */
1887 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1888
1889 start_sequence ();
1890 convert_move (newreg, memref,
1891 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1892 seq = get_insns ();
1893 end_sequence ();
1894
1895 validate_change (insn, &SET_SRC (body), newreg, 1);
1896 }
1897 }
1898
1899 /* See if we can convert this extraction or insertion into
1900 a simple move insn. We might not be able to do so if this
1901 was, for example, part of a PARALLEL.
1902
1903 If we succeed, write out any needed conversions. If we fail,
1904 it is hard to guess why we failed, so don't do anything
1905 special; just let the optimization be suppressed. */
1906
1907 if (apply_change_group () && seq)
1908 emit_insns_before (seq, insn);
1909 }
1910 }
1911 }
1912 \f
1913 /* These routines are responsible for converting virtual register references
1914 to the actual hard register references once RTL generation is complete.
1915
1916 The following four variables are used for communication between the
1917 routines. They contain the offsets of the virtual registers from their
1918 respective hard registers. */
1919
1920 static int in_arg_offset;
1921 static int var_offset;
1922 static int dynamic_offset;
1923 static int out_arg_offset;
1924
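/* The correspondence established by instantiate_virtual_regs_1 is:

     virtual_incoming_args_rtx -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx    -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset  */
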
1925 /* In most machines, the stack pointer register is equivalent to the bottom
1926 of the stack. */
1927
1928 #ifndef STACK_POINTER_OFFSET
1929 #define STACK_POINTER_OFFSET 0
1930 #endif
1931
1932 /* If not defined, pick an appropriate default for the offset of dynamically
1933 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1934 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1935
1936 #ifndef STACK_DYNAMIC_OFFSET
1937
1938 #ifdef ACCUMULATE_OUTGOING_ARGS
1939 /* The bottom of the stack points to the actual arguments. If
1940 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1941 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1942 stack space for register parameters is not pushed by the caller, but
1943 rather part of the fixed stack areas and hence not included in
1944 `current_function_outgoing_args_size'. Nevertheless, we must allow
1945 for it when allocating dynamic stack objects. */
1946
1947 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1948 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1949 (current_function_outgoing_args_size \
1950 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1951
1952 #else
1953 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1954 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
1955 #endif
1956
1957 #else
1958 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
1959 #endif
1960 #endif
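
/* For instance, on a hypothetical machine that accumulates outgoing
   args, defines REG_PARM_STACK_SPACE as 24 but does not define
   OUTGOING_REG_PARM_STACK_SPACE, and has a zero STACK_POINTER_OFFSET,
   a function with 16 bytes of outgoing args gets
   STACK_DYNAMIC_OFFSET (fndecl) == 16 + 24 + 0 == 40.  */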
1961
1962 /* Pass through the INSNS of function FNDECL and convert virtual register
1963 references to hard register references. */
1964
1965 void
1966 instantiate_virtual_regs (fndecl, insns)
1967 tree fndecl;
1968 rtx insns;
1969 {
1970 rtx insn;
1971
1972 /* Compute the offsets to use for this function. */
1973 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
1974 var_offset = STARTING_FRAME_OFFSET;
1975 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
1976 out_arg_offset = STACK_POINTER_OFFSET;
1977
1978 /* Scan all variables and parameters of this function. For each that is
1979 in memory, instantiate all virtual registers if the result is a valid
1980 address. If not, we do it later. That will handle most uses of virtual
1981 regs on many machines. */
1982 instantiate_decls (fndecl, 1);
1983
1984 /* Initialize recognition, indicating that volatile is OK. */
1985 init_recog ();
1986
1987 /* Scan through all the insns, instantiating every virtual register still
1988 present. */
1989 for (insn = insns; insn; insn = NEXT_INSN (insn))
1990 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1991 || GET_CODE (insn) == CALL_INSN)
1992 {
1993 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1994 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1995 }
1996
1997 /* Now instantiate the remaining register equivalences for debugging info.
1998 These will not be valid addresses. */
1999 instantiate_decls (fndecl, 0);
2000
2001 /* Indicate that, from now on, assign_stack_local should use
2002 frame_pointer_rtx. */
2003 virtuals_instantiated = 1;
2004 }
2005
2006 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2007 all virtual registers in their DECL_RTL's.
2008
2009 If VALID_ONLY, do this only if the resulting address is still valid.
2010 Otherwise, always do it. */
2011
2012 static void
2013 instantiate_decls (fndecl, valid_only)
2014 tree fndecl;
2015 int valid_only;
2016 {
2017 tree decl;
2018
2019 if (DECL_INLINE (fndecl))
2020 /* When compiling an inline function, the obstack used for
2021 rtl allocation is the maybepermanent_obstack. Calling
2022 `resume_temporary_allocation' switches us back to that
2023 obstack while we process this function's parameters. */
2024 resume_temporary_allocation ();
2025
2026 /* Process all parameters of the function. */
2027 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2028 {
2029 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2030 valid_only);
2031 instantiate_decl (DECL_INCOMING_RTL (decl),
2032 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2033 }
2034
2035 /* Now process all variables defined in the function or its subblocks. */
2036 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2037
2038 if (DECL_INLINE (fndecl))
2039 {
2040 /* Save all rtl allocated for this function by raising the
2041 high-water mark on the maybepermanent_obstack. */
2042 preserve_data ();
2043 /* All further rtl allocation is now done in the current_obstack. */
2044 rtl_in_current_obstack ();
2045 }
2046 }
2047
2048 /* Subroutine of instantiate_decls: Process all decls in the given
2049 BLOCK node and all its subblocks. */
2050
2051 static void
2052 instantiate_decls_1 (let, valid_only)
2053 tree let;
2054 int valid_only;
2055 {
2056 tree t;
2057
2058 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2059 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2060 valid_only);
2061
2062 /* Process all subblocks. */
2063 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2064 instantiate_decls_1 (t, valid_only);
2065 }
2066
2067 /* Subroutine of the preceding procedures: Given RTL representing a
2068 decl and the size of the object, do any instantiation required.
2069
2070 If VALID_ONLY is non-zero, it means that the RTL should only be
2071 changed if the new address is valid. */
2072
2073 static void
2074 instantiate_decl (x, size, valid_only)
2075 rtx x;
2076 int size;
2077 int valid_only;
2078 {
2079 enum machine_mode mode;
2080 rtx addr;
2081
2082 /* If this is not a MEM, no need to do anything. Similarly if the
2083 address is a constant or a register that is not a virtual register. */
2084
2085 if (x == 0 || GET_CODE (x) != MEM)
2086 return;
2087
2088 addr = XEXP (x, 0);
2089 if (CONSTANT_P (addr)
2090 || (GET_CODE (addr) == REG
2091 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2092 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2093 return;
2094
2095 /* If we should only do this if the address is valid, copy the address.
2096 We need to do this so we can undo any changes that might make the
2097 address invalid. This copy is unfortunate, but probably can't be
2098 avoided. */
2099
2100 if (valid_only)
2101 addr = copy_rtx (addr);
2102
2103 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2104
2105 if (! valid_only)
2106 return;
2107
2108 /* Now verify that the resulting address is valid for every integer or
2109 floating-point mode up to and including SIZE bytes long. We do this
2110 since the object might be accessed in any mode and frame addresses
2111 are shared. */
2112
2113 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2114 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2115 mode = GET_MODE_WIDER_MODE (mode))
2116 if (! memory_address_p (mode, addr))
2117 return;
2118
2119 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2120 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2121 mode = GET_MODE_WIDER_MODE (mode))
2122 if (! memory_address_p (mode, addr))
2123 return;
2124
2125 /* Otherwise, put back the address, now that we have updated it and we
2126 know it is valid. */
2127
2128 XEXP (x, 0) = addr;
2129 }
2130 \f
2131 /* Given a pointer to a piece of rtx and an optional pointer to the
2132 containing object, instantiate any virtual registers present in it.
2133
2134 If EXTRA_INSNS, we always do the replacement and generate
2135 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2136 is not valid.
2137
2138 Return 1 if we either had nothing to do or if we were able to do the
2139 needed replacement. Return 0 otherwise; we only return zero if
2140 EXTRA_INSNS is zero.
2141
2142 We first try some simple transformations to avoid the creation of extra
2143 pseudos. */
2144
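/* For instance, if var_offset were -16, the address

     (plus:SI (reg virtual-stack-vars) (const_int 8))

   would be rewritten in place as

     (plus:SI (reg fp) (const_int -8))

   when the containing insn accepts the new constant; otherwise the
   sum is computed into a fresh pseudo, as the code below arranges.  */
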
2145 static int
2146 instantiate_virtual_regs_1 (loc, object, extra_insns)
2147 rtx *loc;
2148 rtx object;
2149 int extra_insns;
2150 {
2151 rtx x;
2152 RTX_CODE code;
2153 rtx new = 0;
2154 int offset;
2155 rtx temp;
2156 rtx seq;
2157 int i, j;
2158 char *fmt;
2159
2160 /* Re-start here to avoid recursion in common cases. */
2161 restart:
2162
2163 x = *loc;
2164 if (x == 0)
2165 return 1;
2166
2167 code = GET_CODE (x);
2168
2169 /* Check for some special cases. */
2170 switch (code)
2171 {
2172 case CONST_INT:
2173 case CONST_DOUBLE:
2174 case CONST:
2175 case SYMBOL_REF:
2176 case CODE_LABEL:
2177 case PC:
2178 case CC0:
2179 case ASM_INPUT:
2180 case ADDR_VEC:
2181 case ADDR_DIFF_VEC:
2182 case RETURN:
2183 return 1;
2184
2185 case SET:
2186 /* We are allowed to set the virtual registers. This means that
2187 the actual register should receive the source minus the
2188 appropriate offset. This is used, for example, in the handling
2189 of non-local gotos. */
2190 if (SET_DEST (x) == virtual_incoming_args_rtx)
2191 new = arg_pointer_rtx, offset = - in_arg_offset;
2192 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2193 new = frame_pointer_rtx, offset = - var_offset;
2194 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2195 new = stack_pointer_rtx, offset = - dynamic_offset;
2196 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2197 new = stack_pointer_rtx, offset = - out_arg_offset;
2198
2199 if (new)
2200 {
2201 /* The only valid sources here are PLUS or REG. Just do
2202 the simplest possible thing to handle them. */
2203 if (GET_CODE (SET_SRC (x)) != REG
2204 && GET_CODE (SET_SRC (x)) != PLUS)
2205 abort ();
2206
2207 start_sequence ();
2208 if (GET_CODE (SET_SRC (x)) != REG)
2209 temp = force_operand (SET_SRC (x), NULL_RTX);
2210 else
2211 temp = SET_SRC (x);
2212 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2213 seq = get_insns ();
2214 end_sequence ();
2215
2216 emit_insns_before (seq, object);
2217 SET_DEST (x) = new;
2218
2219 if (!validate_change (object, &SET_SRC (x), temp, 0)
2220 || ! extra_insns)
2221 abort ();
2222
2223 return 1;
2224 }
2225
2226 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2227 loc = &SET_SRC (x);
2228 goto restart;
2229
2230 case PLUS:
2231 /* Handle special case of virtual register plus constant. */
2232 if (CONSTANT_P (XEXP (x, 1)))
2233 {
2234 rtx old;
2235
2236 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2237 if (GET_CODE (XEXP (x, 0)) == PLUS)
2238 {
2239 rtx inner = XEXP (XEXP (x, 0), 0);
2240
2241 if (inner == virtual_incoming_args_rtx)
2242 new = arg_pointer_rtx, offset = in_arg_offset;
2243 else if (inner == virtual_stack_vars_rtx)
2244 new = frame_pointer_rtx, offset = var_offset;
2245 else if (inner == virtual_stack_dynamic_rtx)
2246 new = stack_pointer_rtx, offset = dynamic_offset;
2247 else if (inner == virtual_outgoing_args_rtx)
2248 new = stack_pointer_rtx, offset = out_arg_offset;
2249 else
2250 {
2251 loc = &XEXP (x, 0);
2252 goto restart;
2253 }
2254
2255 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2256 extra_insns);
2257 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2258 }
2259
2260 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2261 new = arg_pointer_rtx, offset = in_arg_offset;
2262 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2263 new = frame_pointer_rtx, offset = var_offset;
2264 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2265 new = stack_pointer_rtx, offset = dynamic_offset;
2266 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2267 new = stack_pointer_rtx, offset = out_arg_offset;
2268 else
2269 {
2270 /* We know the second operand is a constant. Unless the
2271 first operand is a REG (which has already been checked),
2272 it needs to be checked. */
2273 if (GET_CODE (XEXP (x, 0)) != REG)
2274 {
2275 loc = &XEXP (x, 0);
2276 goto restart;
2277 }
2278 return 1;
2279 }
2280
2281 old = XEXP (x, 0);
2282 XEXP (x, 0) = new;
2283 new = plus_constant (XEXP (x, 1), offset);
2284
2285 /* If the new constant is zero, try to replace the sum with its
2286 first operand. */
2287 if (new == const0_rtx
2288 && validate_change (object, loc, XEXP (x, 0), 0))
2289 return 1;
2290
2291 /* Next try to replace the constant with the new one. */
2292 if (!validate_change (object, &XEXP (x, 1), new, 0))
2293 {
2294 if (! extra_insns)
2295 {
2296 XEXP (x, 0) = old;
2297 return 0;
2298 }
2299
2300 /* Otherwise copy the new constant into a register and replace
2301 the constant with that register. */
2302 temp = gen_reg_rtx (Pmode);
2303 if (validate_change (object, &XEXP (x, 1), temp, 0))
2304 emit_insn_before (gen_move_insn (temp, new), object);
2305 else
2306 {
2307 /* If that didn't work, replace this expression with a
2308 register containing the sum. */
2309
2310 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2311 XEXP (x, 0) = old;
2312
2313 start_sequence ();
2314 temp = force_operand (new, NULL_RTX);
2315 seq = get_insns ();
2316 end_sequence ();
2317
2318 emit_insns_before (seq, object);
2319 if (! validate_change (object, loc, temp, 0)
2320 && ! validate_replace_rtx (x, temp, object))
2321 abort ();
2322 }
2323 }
2324
2325 return 1;
2326 }
2327
2328 /* Fall through to generic two-operand expression case. */
2329 case EXPR_LIST:
2330 case CALL:
2331 case COMPARE:
2332 case MINUS:
2333 case MULT:
2334 case DIV: case UDIV:
2335 case MOD: case UMOD:
2336 case AND: case IOR: case XOR:
2337 case LSHIFT: case ASHIFT: case ROTATE:
2338 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2339 case NE: case EQ:
2340 case GE: case GT: case GEU: case GTU:
2341 case LE: case LT: case LEU: case LTU:
2342 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2343 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2344 loc = &XEXP (x, 0);
2345 goto restart;
2346
2347 case MEM:
2348 /* Most cases of MEM that convert to valid addresses have already been
2349 handled by our scan of regno_reg_rtx. The only special handling we
2350 need here is to make a copy of the rtx to ensure it isn't being
2351 shared if we have to change it to a pseudo.
2352
2353 If the rtx is a simple reference to an address via a virtual register,
2354 it can potentially be shared. In such cases, first try to make it
2355 a valid address, which can also be shared. Otherwise, copy it and
2356 proceed normally.
2357
2358 First check for common cases that need no processing. These are
2359 usually due to instantiation already being done on a previous instance
2360 of a shared rtx. */
2361
2362 temp = XEXP (x, 0);
2363 if (CONSTANT_ADDRESS_P (temp)
2364 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2365 || temp == arg_pointer_rtx
2366 #endif
2367 || temp == frame_pointer_rtx)
2368 return 1;
2369
2370 if (GET_CODE (temp) == PLUS
2371 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2372 && (XEXP (temp, 0) == frame_pointer_rtx
2373 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2374 || XEXP (temp, 0) == arg_pointer_rtx
2375 #endif
2376 ))
2377 return 1;
2378
2379 if (temp == virtual_stack_vars_rtx
2380 || temp == virtual_incoming_args_rtx
2381 || (GET_CODE (temp) == PLUS
2382 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2383 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2384 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2385 {
2386 /* This MEM may be shared. If the substitution can be done without
2387 the need to generate new pseudos, we want to do it in place
2388 so all copies of the shared rtx benefit. The call below will
2389 only make substitutions if the resulting address is still
2390 valid.
2391
2392 Note that we cannot pass X as the object in the recursive call
2393 since the insn being processed may not allow all valid
2394 addresses. However, if we were not passed an object, we can
2395 only modify X without copying it if X will have a valid
2396 address.
2397
2398 ??? Also note that this can still lose if OBJECT is an insn that
2399 has fewer restrictions on an address than some other insn.
2400 In that case, we will modify the shared address. This case
2401 doesn't seem very likely, though. */
2402
2403 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2404 object ? object : x, 0))
2405 return 1;
2406
2407 /* Otherwise make a copy and process that copy. We copy the entire
2408 RTL expression since it might be a PLUS which could also be
2409 shared. */
2410 *loc = x = copy_rtx (x);
2411 }
2412
2413 /* Fall through to generic unary operation case. */
2414 case USE:
2415 case CLOBBER:
2416 case SUBREG:
2417 case STRICT_LOW_PART:
2418 case NEG: case NOT:
2419 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2420 case SIGN_EXTEND: case ZERO_EXTEND:
2421 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2422 case FLOAT: case FIX:
2423 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2424 case ABS:
2425 case SQRT:
2426 case FFS:
2427 /* These cases either have just one operand, or we know that we need not
2428 check the rest of the operands. */
2429 loc = &XEXP (x, 0);
2430 goto restart;
2431
2432 case REG:
2433 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2434 in front of this insn and substitute the temporary. */
2435 if (x == virtual_incoming_args_rtx)
2436 new = arg_pointer_rtx, offset = in_arg_offset;
2437 else if (x == virtual_stack_vars_rtx)
2438 new = frame_pointer_rtx, offset = var_offset;
2439 else if (x == virtual_stack_dynamic_rtx)
2440 new = stack_pointer_rtx, offset = dynamic_offset;
2441 else if (x == virtual_outgoing_args_rtx)
2442 new = stack_pointer_rtx, offset = out_arg_offset;
2443
2444 if (new)
2445 {
2446 temp = plus_constant (new, offset);
2447 if (!validate_change (object, loc, temp, 0))
2448 {
2449 if (! extra_insns)
2450 return 0;
2451
2452 start_sequence ();
2453 temp = force_operand (temp, NULL_RTX);
2454 seq = get_insns ();
2455 end_sequence ();
2456
2457 emit_insns_before (seq, object);
2458 if (! validate_change (object, loc, temp, 0)
2459 && ! validate_replace_rtx (x, temp, object))
2460 abort ();
2461 }
2462 }
2463
2464 return 1;
2465 }
2466
2467 /* Scan all subexpressions. */
2468 fmt = GET_RTX_FORMAT (code);
2469 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2470 if (*fmt == 'e')
2471 {
2472 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2473 return 0;
2474 }
2475 else if (*fmt == 'E')
2476 for (j = 0; j < XVECLEN (x, i); j++)
2477 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2478 extra_insns))
2479 return 0;
2480
2481 return 1;
2482 }
2483 \f
2484 /* Optimization: assuming this function does not receive nonlocal gotos,
2485 delete the handlers for such, as well as the insns to establish
2486 and disestablish them. */
2487
2488 static void
2489 delete_handlers ()
2490 {
2491 rtx insn;
2492 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2493 {
2494 /* Delete the handler by turning off the flag that would
2495 prevent jump_optimize from deleting it.
2496 Also permit deletion of the nonlocal labels themselves
2497 if nothing local refers to them. */
2498 if (GET_CODE (insn) == CODE_LABEL)
2499 LABEL_PRESERVE_P (insn) = 0;
2500 if (GET_CODE (insn) == INSN
2501 && ((nonlocal_goto_handler_slot != 0
2502 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2503 || (nonlocal_goto_stack_level != 0
2504 && reg_mentioned_p (nonlocal_goto_stack_level,
2505 PATTERN (insn)))))
2506 delete_insn (insn);
2507 }
2508 }
2509
2510 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2511 of the current function. */
2512
2513 rtx
2514 nonlocal_label_rtx_list ()
2515 {
2516 tree t;
2517 rtx x = 0;
2518
2519 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2520 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2521
2522 return x;
2523 }
2524 \f
2525 /* Output a USE for any register use in RTL.
2526 This is used with -noreg to mark the extent of lifespan
2527 of any registers used in a user-visible variable's DECL_RTL. */
2528
2529 void
2530 use_variable (rtl)
2531 rtx rtl;
2532 {
2533 if (GET_CODE (rtl) == REG)
2534 /* This is a register variable. */
2535 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2536 else if (GET_CODE (rtl) == MEM
2537 && GET_CODE (XEXP (rtl, 0)) == REG
2538 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2539 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2540 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2541 /* This is a variable-sized structure. */
2542 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2543 }
2544
2545 /* Like use_variable except that it outputs the USEs after INSN
2546 instead of at the end of the insn-chain. */
2547
2548 void
2549 use_variable_after (rtl, insn)
2550 rtx rtl, insn;
2551 {
2552 if (GET_CODE (rtl) == REG)
2553 /* This is a register variable. */
2554 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2555 else if (GET_CODE (rtl) == MEM
2556 && GET_CODE (XEXP (rtl, 0)) == REG
2557 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2558 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2559 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2560 /* This is a variable-sized structure. */
2561 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2562 }
2563 \f
2564 int
2565 max_parm_reg_num ()
2566 {
2567 return max_parm_reg;
2568 }
2569
2570 /* Return the first insn following those generated by `assign_parms'. */
2571
2572 rtx
2573 get_first_nonparm_insn ()
2574 {
2575 if (last_parm_insn)
2576 return NEXT_INSN (last_parm_insn);
2577 return get_insns ();
2578 }
2579
2580 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
2581 Crash if there is none. */
2582
2583 rtx
2584 get_first_block_beg ()
2585 {
2586 register rtx searcher;
2587 register rtx insn = get_first_nonparm_insn ();
2588
2589 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
2590 if (GET_CODE (searcher) == NOTE
2591 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
2592 return searcher;
2593
2594 abort (); /* Invalid call to this function. (See comments above.) */
2595 return NULL_RTX;
2596 }
2597
2598 /* Return 1 if EXP returns an aggregate value, for which an address
2599 must be passed to the function or returned by the function. */
2600
2601 int
2602 aggregate_value_p (exp)
2603 tree exp;
2604 {
2605 int i, regno, nregs;
2606 rtx reg;
2607 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2608 return 1;
2609 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2610 return 1;
2611 if (flag_pcc_struct_return
2612 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2613 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2614 return 1;
2615 /* Make sure we have suitable call-clobbered regs to return
2616 the value in; if not, we must return it in memory. */
2617 reg = hard_function_value (TREE_TYPE (exp), 0);
2618 regno = REGNO (reg);
2619 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp)));
2620 for (i = 0; i < nregs; i++)
2621 if (! call_used_regs[regno + i])
2622 return 1;
2623 return 0;
2624 }
2625 \f
2626 /* Assign RTL expressions to the function's parameters.
2627 This may involve copying them into registers and using
2628 those registers as the RTL for them.
2629
2630 If SECOND_TIME is non-zero it means that this function is being
2631 called a second time. This is done by integrate.c when a function's
2632 compilation is deferred. We need to come back here in case the
2633 FUNCTION_ARG macro computes items needed for the rest of the compilation
2634 (such as changing which registers are fixed or caller-saved). But suppress
2635 writing any insns or setting DECL_RTL of anything in this case. */
2636
2637 void
2638 assign_parms (fndecl, second_time)
2639 tree fndecl;
2640 int second_time;
2641 {
2642 register tree parm;
2643 register rtx entry_parm = 0;
2644 register rtx stack_parm = 0;
2645 CUMULATIVE_ARGS args_so_far;
2646 enum machine_mode promoted_mode, passed_mode, nominal_mode;
2647 int unsignedp;
2648 /* Total space needed so far for args on the stack,
2649 given as a constant and a tree-expression. */
2650 struct args_size stack_args_size;
2651 tree fntype = TREE_TYPE (fndecl);
2652 tree fnargs = DECL_ARGUMENTS (fndecl);
2653 /* This is used for the arg pointer when referring to stack args. */
2654 rtx internal_arg_pointer;
2655 /* This is a dummy PARM_DECL that we used for the function result if
2656 the function returns a structure. */
2657 tree function_result_decl = 0;
2658 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2659 int varargs_setup = 0;
2660 rtx conversion_insns = 0;
2661
2662 /* Nonzero if the last arg is named `__builtin_va_alist',
2663 which is used on some machines for old-fashioned non-ANSI varargs.h;
2664 this should be stuck onto the stack as if it had arrived there. */
2665 int vararg
2666 = (fnargs
2667 && (parm = tree_last (fnargs)) != 0
2668 && DECL_NAME (parm)
2669 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2670 "__builtin_va_alist")));
2671
2672 /* Nonzero if function takes extra anonymous args.
2673 This means the last named arg must be on the stack
2674 right before the anonymous ones. */
2675 int stdarg
2676 = (TYPE_ARG_TYPES (fntype) != 0
2677 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2678 != void_type_node));
2679
2680 /* If the reg that the virtual arg pointer will be translated into is
2681 not a fixed reg or is the stack pointer, make a copy of the virtual
2682 arg pointer, and address parms via the copy. The frame pointer is
2683 considered fixed even though it is not marked as such.
2684
2685 The second time through, simply use ap to avoid generating rtx. */
2686
2687 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2688 || ! (fixed_regs[ARG_POINTER_REGNUM]
2689 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2690 && ! second_time)
2691 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2692 else
2693 internal_arg_pointer = virtual_incoming_args_rtx;
2694 current_function_internal_arg_pointer = internal_arg_pointer;
2695
2696 stack_args_size.constant = 0;
2697 stack_args_size.var = 0;
2698
2699 /* If struct value address is treated as the first argument, make it so. */
2700 if (aggregate_value_p (DECL_RESULT (fndecl))
2701 && ! current_function_returns_pcc_struct
2702 && struct_value_incoming_rtx == 0)
2703 {
2704 tree type = build_pointer_type (fntype);
2705
2706 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2707
2708 DECL_ARG_TYPE (function_result_decl) = type;
2709 TREE_CHAIN (function_result_decl) = fnargs;
2710 fnargs = function_result_decl;
2711 }
2712
2713 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2714 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2715
2716 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2717 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
2718 #else
2719 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
2720 #endif
2721
2722 /* We haven't yet found an argument that we must push and pretend the
2723 caller did. */
2724 current_function_pretend_args_size = 0;
2725
2726 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2727 {
2728 int aggregate
2729 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2730 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2731 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2732 struct args_size stack_offset;
2733 struct args_size arg_size;
2734 int passed_pointer = 0;
2735 tree passed_type = DECL_ARG_TYPE (parm);
2736
2737 /* Set LAST_NAMED if this is last named arg before some
2738 anonymous args. We treat it as if it were anonymous too. */
2739 int last_named = ((TREE_CHAIN (parm) == 0
2740 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2741 && (vararg || stdarg));
2742
2743 if (TREE_TYPE (parm) == error_mark_node
2744 /* This can happen after weird syntax errors
2745 or if an enum type is defined among the parms. */
2746 || TREE_CODE (parm) != PARM_DECL
2747 || passed_type == NULL)
2748 {
2749 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
2750 const0_rtx);
2751 TREE_USED (parm) = 1;
2752 continue;
2753 }
2754
2755 /* For varargs.h function, save info about regs and stack space
2756 used by the individual args, not including the va_alist arg. */
2757 if (vararg && last_named)
2758 current_function_args_info = args_so_far;
2759
2760 /* Find mode of arg as it is passed, and mode of arg
2761 as it should be during execution of this function. */
2762 passed_mode = TYPE_MODE (passed_type);
2763 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2764
2765 /* If the parm's mode is VOID, its value doesn't matter,
2766 so avoid the usual things like emit_move_insn that could crash. */
2767 if (nominal_mode == VOIDmode)
2768 {
2769 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2770 continue;
2771 }
2772
2773 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2774 /* See if this arg was passed by invisible reference. */
2775 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2776 passed_type, ! last_named))
2777 {
2778 passed_type = build_pointer_type (passed_type);
2779 passed_pointer = 1;
2780 passed_mode = nominal_mode = Pmode;
2781 }
2782 #endif
2783
2784 promoted_mode = passed_mode;
2785
2786 #ifdef PROMOTE_FUNCTION_ARGS
2787 /* Compute the mode to which the arg is actually extended. */
2788 if (TREE_CODE (passed_type) == INTEGER_TYPE
2789 || TREE_CODE (passed_type) == ENUMERAL_TYPE
2790 || TREE_CODE (passed_type) == BOOLEAN_TYPE
2791 || TREE_CODE (passed_type) == CHAR_TYPE
2792 || TREE_CODE (passed_type) == REAL_TYPE
2793 || TREE_CODE (passed_type) == POINTER_TYPE
2794 || TREE_CODE (passed_type) == OFFSET_TYPE)
2795 {
2796 unsignedp = TREE_UNSIGNED (passed_type);
2797 PROMOTE_MODE (promoted_mode, unsignedp, passed_type);
2798 }
2799 #endif
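
/* E.g., on a hypothetical target whose PROMOTE_MODE widens all
   integral types narrower than a word, a `short' parm (HImode) would
   yield promoted_mode == SImode here, meaning the caller is assumed
   to have extended the value to a full word before passing it.  */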
2800
2801 /* Let machine desc say which reg (if any) the parm arrives in.
2802 0 means it arrives on the stack. */
2803 #ifdef FUNCTION_INCOMING_ARG
2804 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
2805 passed_type, ! last_named);
2806 #else
2807 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
2808 passed_type, ! last_named);
2809 #endif
2810
2811 if (entry_parm)
2812 passed_mode = promoted_mode;
2813
2814 #ifdef SETUP_INCOMING_VARARGS
2815 /* If this is the last named parameter, do any required setup for
2816 varargs or stdargs. We need to know about the case of this being an
2817 addressable type, in which case we skip the registers it
2818 would have arrived in.
2819
2820 For stdargs, LAST_NAMED will be set for two parameters, the one that
2821 is actually the last named, and the dummy parameter. We only
2822 want to do this action once.
2823
2824 Also, indicate when RTL generation is to be suppressed. */
2825 if (last_named && !varargs_setup)
2826 {
2827 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2828 current_function_pretend_args_size,
2829 second_time);
2830 varargs_setup = 1;
2831 }
2832 #endif
2833
2834 /* Determine parm's home in the stack,
2835 in case it arrives in the stack or we should pretend it did.
2836
2837 Compute the stack position and rtx where the argument arrives
2838 and its size.
2839
2840 There is one complexity here: If this was a parameter that would
2841 have been passed in registers, but wasn't, solely because it is
2842 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2843 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2844 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2845 0 as it was the previous time. */
2846
2847 locate_and_pad_parm (passed_mode, passed_type,
2848 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2849 1,
2850 #else
2851 #ifdef FUNCTION_INCOMING_ARG
2852 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2853 passed_type,
2854 (! last_named
2855 || varargs_setup)) != 0,
2856 #else
2857 FUNCTION_ARG (args_so_far, passed_mode,
2858 passed_type,
2859 ! last_named || varargs_setup) != 0,
2860 #endif
2861 #endif
2862 fndecl, &stack_args_size, &stack_offset, &arg_size);
2863
2864 if (! second_time)
2865 {
2866 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2867
2868 if (offset_rtx == const0_rtx)
2869 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2870 else
2871 stack_parm = gen_rtx (MEM, passed_mode,
2872 gen_rtx (PLUS, Pmode,
2873 internal_arg_pointer, offset_rtx));
2874
2875 /* If this is a memory ref that contains aggregate components,
2876 mark it as such for cse and loop optimize. */
2877 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2878 }
2879
2880 /* If this parameter was passed both in registers and in the stack,
2881 use the copy on the stack. */
2882 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2883 entry_parm = 0;
2884
2885 /* If this parm was passed part in regs and part in memory,
2886 pretend it arrived entirely in memory
2887 by pushing the register-part onto the stack.
2888
2889 In the special case of a DImode or DFmode that is split,
2890 we could put it together in a pseudoreg directly,
2891 but for now that's not worth bothering with. */
2892
2893 if (entry_parm)
2894 {
2895 int nregs = 0;
2896 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2897 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2898 passed_type, ! last_named);
2899 #endif
2900
2901 if (nregs > 0)
2902 {
2903 current_function_pretend_args_size
2904 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2905 / (PARM_BOUNDARY / BITS_PER_UNIT)
2906 * (PARM_BOUNDARY / BITS_PER_UNIT));
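/* E.g., with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 (8 bytes),
   one register-part gives (4 + 8 - 1) / 8 * 8 == 8 bytes of
   pretend arg space.  */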
2907
2908 if (! second_time)
2909 move_block_from_reg (REGNO (entry_parm),
2910 validize_mem (stack_parm), nregs);
2911 entry_parm = stack_parm;
2912 }
2913 }
2914
2915 /* If we didn't decide this parm came in a register,
2916 by default it came on the stack. */
2917 if (entry_parm == 0)
2918 entry_parm = stack_parm;
2919
2920 /* Record permanently how this parm was passed. */
2921 if (! second_time)
2922 DECL_INCOMING_RTL (parm) = entry_parm;
2923
2924 /* If there is actually space on the stack for this parm,
2925 count it in stack_args_size; otherwise set stack_parm to 0
2926 to indicate there is no preallocated stack slot for the parm. */
2927
2928 if (entry_parm == stack_parm
2929 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
2930 /* On some machines, even if a parm value arrives in a register
2931 there is still an (uninitialized) stack slot allocated for it.
2932
2933 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2934 whether this parameter already has a stack slot allocated,
2935 because an arg block exists only if current_function_args_size
2936 is larger than some threshold, and we haven't calculated that
2937 yet. So, for now, we just assume that stack slots never exist
2938 in this case. */
2939 || REG_PARM_STACK_SPACE (fndecl) > 0
2940 #endif
2941 )
2942 {
2943 stack_args_size.constant += arg_size.constant;
2944 if (arg_size.var)
2945 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2946 }
2947 else
2948 /* No stack slot was pushed for this parm. */
2949 stack_parm = 0;
2950
2951 /* Update info on where next arg arrives in registers. */
2952
2953 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2954 passed_type, ! last_named);
2955
2956 /* If this is our second time through, we are done with this parm. */
2957 if (second_time)
2958 continue;
2959
2960 /* If we can't trust the parm stack slot to be aligned enough
2961 for its ultimate type, don't use that slot after entry.
2962 We'll make another stack slot, if we need one. */
2963 {
2964 #ifdef FUNCTION_ARG_BOUNDARY
2965 int thisparm_boundary
2966 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
2967 #else
2968 int thisparm_boundary = PARM_BOUNDARY;
2969 #endif
2970
2971 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
2972 stack_parm = 0;
2973 }
2974
2975 /* Now adjust STACK_PARM to the mode and precise location
2976 where this parameter should live during execution,
2977 if we discover that it must live in the stack during execution.
2978 To make debuggers happier on big-endian machines, we store
2979 the value in the last bytes of the space available. */
2980
2981 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2982 && stack_parm != 0)
2983 {
2984 rtx offset_rtx;
2985
2986 #if BYTES_BIG_ENDIAN
2987 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2988 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2989 - GET_MODE_SIZE (nominal_mode));
2990 #endif
2991
2992 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2993 if (offset_rtx == const0_rtx)
2994 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2995 else
2996 stack_parm = gen_rtx (MEM, nominal_mode,
2997 gen_rtx (PLUS, Pmode,
2998 internal_arg_pointer, offset_rtx));
2999
3000 /* If this is a memory ref that contains aggregate components,
3001 mark it as such for cse and loop optimize. */
3002 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3003 }
3004
3005 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3006 in the mode in which it arrives.
3007 STACK_PARM is an RTX for a stack slot where the parameter can live
3008 during the function (in case we want to put it there).
3009 STACK_PARM is 0 if no stack slot was pushed for it.
3010
3011 Now output code if necessary to convert ENTRY_PARM to
3012 the type in which this function declares it,
3013 and store that result in an appropriate place,
3014 which may be a pseudo reg, may be STACK_PARM,
3015 or may be a local stack slot if STACK_PARM is 0.
3016
3017 Set DECL_RTL to that place. */
3018
3019 if (nominal_mode == BLKmode)
3020 {
3021 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3022 if (GET_CODE (entry_parm) == REG)
3023 {
3024 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3025 UNITS_PER_WORD);
3026
3027 /* Note that we will be storing an integral number of words.
3028 So we have to be careful to ensure that we allocate an
3029 integral number of words. We do this below in the
3030 assign_stack_local if space was not allocated in the argument
3031 list. If it was, this will not work if PARM_BOUNDARY is not
3032 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3033 if it becomes a problem. */
3034
3035 if (stack_parm == 0)
3036 {
3037 stack_parm
3038 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
3039 /* If this is a memory ref that contains aggregate components,
3040 mark it as such for cse and loop optimize. */
3041 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3042 }
3043
3044 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3045 abort ();
3046
3047 move_block_from_reg (REGNO (entry_parm),
3048 validize_mem (stack_parm),
3049 size_stored / UNITS_PER_WORD);
3050 }
3051 DECL_RTL (parm) = stack_parm;
3052 }
3053 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3054 && ! DECL_INLINE (fndecl))
3055 /* layout_decl may set this. */
3056 || TREE_ADDRESSABLE (parm)
3057 || TREE_SIDE_EFFECTS (parm)
3058 /* If -ffloat-store specified, don't put explicit
3059 float variables into registers. */
3060 || (flag_float_store
3061 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3062 /* Always assign pseudo to structure return or item passed
3063 by invisible reference. */
3064 || passed_pointer || parm == function_result_decl)
3065 {
3066 /* Store the parm in a pseudoregister during the function, but we
3067 may need to do it in a wider mode. */
3068
3069 register rtx parmreg;
3070
3071 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3072 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
3073 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
3074 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
3075 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
3076 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
3077 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
3078 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
3079 {
3080 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
3081 }
3082
3083 parmreg = gen_reg_rtx (nominal_mode);
3084 REG_USERVAR_P (parmreg) = 1;
3085
3086 /* If this was an item that we received a pointer to, set DECL_RTL
3087 appropriately. */
3088 if (passed_pointer)
3089 {
3090 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3091 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3092 }
3093 else
3094 DECL_RTL (parm) = parmreg;
3095
3096 /* Copy the value into the register. */
3097 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
3098 {
3099 /* If ENTRY_PARM is a hard register, it might be in a register
3100 not valid for operating in its mode (e.g., an odd-numbered
3101 register for a DFmode). In that case, moves are the only
3102 thing valid, so we can't do a convert from there. This
3103 occurs when the calling sequence allows such misaligned
3104 usages.
3105
3106 In addition, the conversion may involve a call, which could
3107 clobber parameters which haven't been copied to pseudo
3108 registers yet. Therefore, we must first copy the parm to
3109 a pseudo reg here, and save the conversion until after all
3110 parameters have been moved. */
3111
3112 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3113
3114 emit_move_insn (tempreg, validize_mem (entry_parm));
3115
3116 push_to_sequence (conversion_insns);
3117 convert_move (parmreg, tempreg, unsignedp);
3118 conversion_insns = get_insns ();
3119 end_sequence ();
3120 }
3121 else
3122 emit_move_insn (parmreg, validize_mem (entry_parm));
3123
3124 /* If we were passed a pointer but the actual value
3125 can safely live in a register, put it in one. */
3126 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3127 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3128 && ! DECL_INLINE (fndecl))
3129 /* layout_decl may set this. */
3130 || TREE_ADDRESSABLE (parm)
3131 || TREE_SIDE_EFFECTS (parm)
3132 /* If -ffloat-store specified, don't put explicit
3133 float variables into registers. */
3134 || (flag_float_store
3135 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3136 {
3137 /* We can't use nominal_mode, because it will have been set to
3138 Pmode above. We must use the actual mode of the parm. */
3139 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3140 emit_move_insn (parmreg, DECL_RTL (parm));
3141 DECL_RTL (parm) = parmreg;
3142 }
3143
3144 /* In any case, record the parm's desired stack location
3145 in case we later discover it must live in the stack. */
3146 if (REGNO (parmreg) >= nparmregs)
3147 {
3148 rtx *new;
3149 new = (rtx *) oballoc ((REGNO (parmreg) + 5) * sizeof (rtx));
3150 /* Copy only the nparmregs slots that exist in the old table. */
3151 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
3152 nparmregs = REGNO (parmreg) + 5, parm_reg_stack_loc = new;
3153 }
3154 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3155
3156 /* Mark the register as eliminable if we did no conversion
3157 and it was copied from memory at a fixed offset,
3158 and the arg pointer was not copied to a pseudo-reg.
3159 If the arg pointer is a pseudo reg or the offset formed
3160 an invalid address, such memory-equivalences
3161 as we make here would screw up life analysis for it. */
3162 if (nominal_mode == passed_mode
3163 && GET_CODE (entry_parm) == MEM
3164 && entry_parm == stack_parm
3165 && stack_offset.var == 0
3166 && reg_mentioned_p (virtual_incoming_args_rtx,
3167 XEXP (entry_parm, 0)))
3168 REG_NOTES (get_last_insn ())
3169 = gen_rtx (EXPR_LIST, REG_EQUIV,
3170 entry_parm, REG_NOTES (get_last_insn ()));
3171
3172 /* For pointer data type, suggest pointer register. */
3173 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3174 mark_reg_pointer (parmreg);
3175 }
3176 else
3177 {
3178 /* Value must be stored in the stack slot STACK_PARM
3179 during function execution. */
3180
3181 if (passed_mode != nominal_mode)
3182 {
3183 /* Conversion is required. */
3184 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3185
3186 emit_move_insn (tempreg, validize_mem (entry_parm));
3187
3188 push_to_sequence (conversion_insns);
3189 entry_parm = convert_to_mode (nominal_mode, tempreg,
3190 TREE_UNSIGNED (TREE_TYPE (parm)));
3191 conversion_insns = get_insns ();
3192 end_sequence ();
3193 }
3194
3195 if (entry_parm != stack_parm)
3196 {
3197 if (stack_parm == 0)
3198 {
3199 stack_parm
3200 = assign_stack_local (GET_MODE (entry_parm),
3201 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3202 /* If this is a memory ref that contains aggregate components,
3203 mark it as such for cse and loop optimize. */
3204 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3205 }
3206
3207 if (passed_mode != nominal_mode)
3208 {
3209 push_to_sequence (conversion_insns);
3210 emit_move_insn (validize_mem (stack_parm),
3211 validize_mem (entry_parm));
3212 conversion_insns = get_insns ();
3213 end_sequence ();
3214 }
3215 else
3216 emit_move_insn (validize_mem (stack_parm),
3217 validize_mem (entry_parm));
3218 }
3219
3220 DECL_RTL (parm) = stack_parm;
3221 }
3222
3223 /* If this "parameter" was the place where we are receiving the
3224 function's incoming structure pointer, set up the result. */
3225 if (parm == function_result_decl)
3226 DECL_RTL (DECL_RESULT (fndecl))
3227 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3228
3229 if (TREE_THIS_VOLATILE (parm))
3230 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3231 if (TREE_READONLY (parm))
3232 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3233 }
3234
3235 /* Output all parameter conversion instructions (possibly including calls)
3236 now that all parameters have been copied out of hard registers. */
3237 emit_insns (conversion_insns);
3238
3239 max_parm_reg = max_reg_num ();
3240 last_parm_insn = get_last_insn ();
3241
3242 current_function_args_size = stack_args_size.constant;
3243
3244 /* Adjust function incoming argument size for alignment and
3245 minimum length. */
3246
3247 #ifdef REG_PARM_STACK_SPACE
3248 #ifndef MAYBE_REG_PARM_STACK_SPACE
3249 current_function_args_size = MAX (current_function_args_size,
3250 REG_PARM_STACK_SPACE (fndecl));
3251 #endif
3252 #endif
3253
3254 #ifdef STACK_BOUNDARY
3255 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3256
3257 current_function_args_size
3258 = ((current_function_args_size + STACK_BYTES - 1)
3259 / STACK_BYTES) * STACK_BYTES;
3260 #endif
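
/* E.g., STACK_BOUNDARY == 64 gives STACK_BYTES == 8, so an args size
   of 20 bytes is rounded up to 24 here.  */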
3261
3262 #ifdef ARGS_GROW_DOWNWARD
3263 current_function_arg_offset_rtx
3264 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3265 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3266 size_int (-stack_args_size.constant)),
3267 NULL_RTX, VOIDmode, 0));
3268 #else
3269 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3270 #endif
3271
3272 /* See how many bytes, if any, of its args a function should try to pop
3273 on return. */
3274
3275 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3276 current_function_args_size);
3277
3278 /* For stdarg.h function, save info about regs and stack space
3279 used by the named args. */
3280
3281 if (stdarg)
3282 current_function_args_info = args_so_far;
3283
3284 /* Set the rtx used for the function return value. Put this in its
3285 own variable so any optimizers that need this information don't have
3286 to include tree.h. Do this here so it gets done when an inlined
3287 function gets output. */
3288
3289 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3290 }
3291 \f
3292 /* Compute the size and offset from the start of the stacked arguments for a
3293 parm passed in mode PASSED_MODE and with type TYPE.
3294
3295 INITIAL_OFFSET_PTR points to the current offset into the stacked
3296 arguments.
3297
3298 The starting offset and size for this parm are returned in *OFFSET_PTR
3299 and *ARG_SIZE_PTR, respectively.
3300
3301 IN_REGS is non-zero if the argument will be passed in registers. It will
3302 never be set if REG_PARM_STACK_SPACE is not defined.
3303
3304 FNDECL is the function in which the argument was defined.
3305
3306 There are two types of rounding that are done. The first, controlled by
3307 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3308 list to be aligned to the specific boundary (in bits). This rounding
3309 affects the initial and starting offsets, but not the argument size.
3310
3311 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3312 optionally rounds the size of the parm to PARM_BOUNDARY. The
3313 initial offset is not affected by this rounding, while the size always
3314 is and the starting offset may be. */
3315
3316 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
3317 initial_offset_ptr is positive because locate_and_pad_parm's
3318 callers pass in the total size of args so far as
3319 initial_offset_ptr. arg_size_ptr is always positive. */
3320
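/* A worked example for the upward-growing case, with hypothetical
   values: 12 bytes of args so far, FUNCTION_ARG_BOUNDARY of 64 bits,
   PARM_BOUNDARY of 32 bits, and an SImode parm. The initial offset
   12 is rounded up to 16 by pad_to_arg_alignment, so *offset_ptr
   becomes 16; the 4-byte size is already a multiple of PARM_BOUNDARY,
   so *arg_size_ptr becomes 4 and the next parm starts at offset 20.  */
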
3321 static void pad_to_arg_alignment (), pad_below ();
3322
3323 void
3324 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3325 initial_offset_ptr, offset_ptr, arg_size_ptr)
3326 enum machine_mode passed_mode;
3327 tree type;
3328 int in_regs;
3329 tree fndecl;
3330 struct args_size *initial_offset_ptr;
3331 struct args_size *offset_ptr;
3332 struct args_size *arg_size_ptr;
3333 {
3334 tree sizetree
3335 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3336 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3337 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3338 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3339 int reg_parm_stack_space = 0;
3340
3341 #ifdef REG_PARM_STACK_SPACE
3342 /* If we have found a stack parm before we reach the end of the
3343 area reserved for registers, skip that area. */
3344 if (! in_regs)
3345 {
3346 #ifdef MAYBE_REG_PARM_STACK_SPACE
3347 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3348 #else
3349 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3350 #endif
3351 if (reg_parm_stack_space > 0)
3352 {
3353 if (initial_offset_ptr->var)
3354 {
3355 initial_offset_ptr->var
3356 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3357 size_int (reg_parm_stack_space));
3358 initial_offset_ptr->constant = 0;
3359 }
3360 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3361 initial_offset_ptr->constant = reg_parm_stack_space;
3362 }
3363 }
3364 #endif /* REG_PARM_STACK_SPACE */
3365
3366 arg_size_ptr->var = 0;
3367 arg_size_ptr->constant = 0;
3368
3369 #ifdef ARGS_GROW_DOWNWARD
3370 if (initial_offset_ptr->var)
3371 {
3372 offset_ptr->constant = 0;
3373 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3374 initial_offset_ptr->var);
3375 }
3376 else
3377 {
3378 offset_ptr->constant = - initial_offset_ptr->constant;
3379 offset_ptr->var = 0;
3380 }
3381 if (where_pad == upward
3382 && (TREE_CODE (sizetree) != INTEGER_CST
3383 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3384 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3385 SUB_PARM_SIZE (*offset_ptr, sizetree);
3386 if (where_pad != downward)
3387 pad_to_arg_alignment (offset_ptr, boundary);
3388 if (initial_offset_ptr->var)
3389 {
3390 arg_size_ptr->var = size_binop (MINUS_EXPR,
3391 size_binop (MINUS_EXPR,
3392 integer_zero_node,
3393 initial_offset_ptr->var),
3394 offset_ptr->var);
3395 }
3396 else
3397 {
3398 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3399 offset_ptr->constant);
3400 }
3401 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3402 if (where_pad == downward)
3403 pad_below (arg_size_ptr, passed_mode, sizetree);
3404 #else /* !ARGS_GROW_DOWNWARD */
3405 pad_to_arg_alignment (initial_offset_ptr, boundary);
3406 *offset_ptr = *initial_offset_ptr;
3407 if (where_pad == downward)
3408 pad_below (offset_ptr, passed_mode, sizetree);
3409
3410 #ifdef PUSH_ROUNDING
3411 if (passed_mode != BLKmode)
3412 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3413 #endif
3414
3415 if (where_pad != none
3416 && (TREE_CODE (sizetree) != INTEGER_CST
3417 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3418 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3419
3420 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3421 #endif /* ARGS_GROW_DOWNWARD */
3422 }
3423
3424 /* Round the stack offset in *OFFSET_PTR to a multiple of BOUNDARY:
3425 downward when ARGS_GROW_DOWNWARD, upward otherwise. BOUNDARY is
measured in bits, but must be a multiple of a storage unit. */
3426
3427 static void
3428 pad_to_arg_alignment (offset_ptr, boundary)
3429 struct args_size *offset_ptr;
3430 int boundary;
3431 {
3432 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3433
3434 if (boundary > BITS_PER_UNIT)
3435 {
3436 if (offset_ptr->var)
3437 {
3438 offset_ptr->var =
3439 #ifdef ARGS_GROW_DOWNWARD
3440 round_down
3441 #else
3442 round_up
3443 #endif
3444 (ARGS_SIZE_TREE (*offset_ptr),
3445 boundary_in_bytes);
3446 offset_ptr->constant = 0; /*?*/
3447 }
3448 else
3449 offset_ptr->constant =
3450 #ifdef ARGS_GROW_DOWNWARD
3451 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3452 #else
3453 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3454 #endif
3455 }
3456 }
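/* A hedged numeric sketch of the constant case above: rounding to a 64-bit
   (8-byte) boundary moves a positive offset up, while an ARGS_GROW_DOWNWARD
   offset (negative) is moved down, away from zero, so the slot stays
   aligned in both directions.  Standalone, not compiled as part of
   function.c.  */
#if 0
#include <stdio.h>

int
main ()
{
  int up = (13 + 8 - 1) & ~(8 - 1);	/* ceiling:  13 -> 16 */
  int down = (-13) & ~(8 - 1);		/* floor:   -13 -> -16 */
  printf ("%d %d\n", up, down);
  return 0;
}
#endif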
3457
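/* Adjust *OFFSET_PTR to skip any padding placed below (at lower addresses
   than) the actual data of an argument of mode PASSED_MODE, so that the
   offset addresses the data itself.  SIZETREE supplies the size for
   BLKmode arguments.  */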
3458 static void
3459 pad_below (offset_ptr, passed_mode, sizetree)
3460 struct args_size *offset_ptr;
3461 enum machine_mode passed_mode;
3462 tree sizetree;
3463 {
3464 if (passed_mode != BLKmode)
3465 {
3466 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3467 offset_ptr->constant
3468 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3469 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3470 - GET_MODE_SIZE (passed_mode));
3471 }
3472 else
3473 {
3474 if (TREE_CODE (sizetree) != INTEGER_CST
3475 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3476 {
3477 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3478 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3479 /* Add it in. */
3480 ADD_PARM_SIZE (*offset_ptr, s2);
3481 SUB_PARM_SIZE (*offset_ptr, sizetree);
3482 }
3483 }
3484 }
3485
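/* Round VALUE down to the nearest multiple of DIVISOR, as a tree.  */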
3486 static tree
3487 round_down (value, divisor)
3488 tree value;
3489 int divisor;
3490 {
3491 return size_binop (MULT_EXPR,
3492 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3493 size_int (divisor));
3494 }
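/* For non-negative constants this is just truncating division followed by
   multiplication; e.g. 22 rounded down by 8 is (22 / 8) * 8 == 16.  A
   standalone sketch, not compiled as part of function.c: */
#if 0
static int
round_down_example (value, divisor)
     int value, divisor;
{
  return (value / divisor) * divisor;	/* 22, 8 => 16 */
}
#endif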
3495 \f
3496 /* Walk the tree of blocks describing the binding levels within a function
3497 and warn about uninitialized variables.
3498 This is done after calling flow_analysis and before global_alloc
3499 clobbers the pseudo-regs to hard regs. */
3500
3501 void
3502 uninitialized_vars_warning (block)
3503 tree block;
3504 {
3505 register tree decl, sub;
3506 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3507 {
3508 if (TREE_CODE (decl) == VAR_DECL
3509 /* These warnings are unreliable for aggregates
3510 because assigning the fields one by one can fail to convince
3511 flow.c that the entire aggregate was initialized.
3512 Unions are troublesome because members may be shorter. */
3513 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3514 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3515 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3516 && DECL_RTL (decl) != 0
3517 && GET_CODE (DECL_RTL (decl)) == REG
3518 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3519 warning_with_decl (decl,
3520 "`%s' may be used uninitialized in this function");
3521 if (TREE_CODE (decl) == VAR_DECL
3522 && DECL_RTL (decl) != 0
3523 && GET_CODE (DECL_RTL (decl)) == REG
3524 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3525 warning_with_decl (decl,
3526 "variable `%s' may be clobbered by `longjmp'");
3527 }
3528 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3529 uninitialized_vars_warning (sub);
3530 }
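/* A hedged example of what this catches and why aggregates are skipped:
   flow.c sees `i' read before it is ever set, but it would also see the
   fields of `p' set one at a time and could wrongly conclude the struct
   as a whole was never initialized.  Standalone, not compiled as part of
   function.c.  */
#if 0
struct point { int x, y; };

int
f ()
{
  int i;			/* `i' may be used uninitialized here */
  struct point p;
  p.x = 1;			/* field-by-field init; type is exempt */
  p.y = 2;
  return i + p.x + p.y;
}
#endif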
3531
3532 /* Do the appropriate part of uninitialized_vars_warning
3533 but for arguments instead of local variables. */
3534
3535 void
3536 setjmp_args_warning (block)
3537 tree block;
3538 {
3539 register tree decl;
3540 for (decl = DECL_ARGUMENTS (current_function_decl);
3541 decl; decl = TREE_CHAIN (decl))
3542 if (DECL_RTL (decl) != 0
3543 && GET_CODE (DECL_RTL (decl)) == REG
3544 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3545 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3546 }
3547
3548 /* If this function calls setjmp, put all vars into the stack
3549 unless they were declared `register'. */
3550
3551 void
3552 setjmp_protect (block)
3553 tree block;
3554 {
3555 register tree decl, sub;
3556 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3557 if ((TREE_CODE (decl) == VAR_DECL
3558 || TREE_CODE (decl) == PARM_DECL)
3559 && DECL_RTL (decl) != 0
3560 && GET_CODE (DECL_RTL (decl)) == REG
3561 /* If this variable came from an inline function, it must be
3562 that its life doesn't overlap the setjmp. If there was a
3563 setjmp in the function, it would already be in memory. We
3564 must exclude such variables because their DECL_RTL might be
3565 set to strange things such as virtual_stack_vars_rtx. */
3566 && ! DECL_FROM_INLINE (decl)
3567 && (
3568 #ifdef NON_SAVING_SETJMP
3569 /* If longjmp doesn't restore the registers,
3570 don't put anything in them. */
3571 NON_SAVING_SETJMP
3572 ||
3573 #endif
3574 ! DECL_REGISTER (decl)))
3575 put_var_into_stack (decl);
3576 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3577 setjmp_protect (sub);
3578 }
3579 \f
3580 /* Like the previous function, but for args instead of local variables. */
3581
3582 void
3583 setjmp_protect_args ()
3584 {
3585 register tree decl, sub;
3586 for (decl = DECL_ARGUMENTS (current_function_decl);
3587 decl; decl = TREE_CHAIN (decl))
3588 if ((TREE_CODE (decl) == VAR_DECL
3589 || TREE_CODE (decl) == PARM_DECL)
3590 && DECL_RTL (decl) != 0
3591 && GET_CODE (DECL_RTL (decl)) == REG
3592 && (
3593 /* If longjmp doesn't restore the registers,
3594 don't put anything in them. */
3595 #ifdef NON_SAVING_SETJMP
3596 NON_SAVING_SETJMP
3597 ||
3598 #endif
3599 ! DECL_REGISTER (decl)))
3600 put_var_into_stack (decl);
3601 }
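/* A minimal sketch of the hazard these two functions guard against: if
   `n' lived in a call-saved register, longjmp could restore that register
   to its setjmp-time contents, silently undoing the increment.  Forcing
   `n' into the stack makes the code safe.  Standalone, not compiled as
   part of function.c.  */
#if 0
#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

int
main ()
{
  int n = 0;			/* candidate for put_var_into_stack */
  if (setjmp (env) == 0)
    {
      n = 1;
      longjmp (env, 1);		/* may clobber `n' if it is in a reg */
    }
  printf ("%d\n", n);		/* prints 1 only if `n' was in memory */
  return 0;
}
#endif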
3602 \f
3603 /* Return the context-pointer register corresponding to DECL,
3604 or 0 if it does not need one. */
3605
3606 rtx
3607 lookup_static_chain (decl)
3608 tree decl;
3609 {
3610 tree context = decl_function_context (decl);
3611 tree link;
3612
3613 if (context == 0)
3614 return 0;
3615
3616 /* We treat inline_function_decl as an alias for the current function
3617 because that is the inline function whose vars, types, etc.
3618 are being merged into the current function.
3619 See expand_inline_function. */
3620 if (context == current_function_decl || context == inline_function_decl)
3621 return virtual_stack_vars_rtx;
3622
3623 for (link = context_display; link; link = TREE_CHAIN (link))
3624 if (TREE_PURPOSE (link) == context)
3625 return RTL_EXPR_RTL (TREE_VALUE (link));
3626
3627 abort ();
3628 }
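/* A sketch of what the context display supports: with the GNU C
   nested-function extension, `inner' below must reach `x' in the frame of
   `outer', and the pointer it uses for that is the static chain looked up
   here.  Standalone GNU C, not compiled as part of function.c.  */
#if 0
int
outer (x)
     int x;
{
  int inner (int y) { return x + y; }	/* reads outer's `x' */
  return inner (1);			/* returns x + 1 */
}
#endif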
3629 \f
3630 /* Convert a stack slot address ADDR for variable VAR
3631 (from a containing function)
3632 into an address valid in this function (using a static chain). */
3633
3634 rtx
3635 fix_lexical_addr (addr, var)
3636 rtx addr;
3637 tree var;
3638 {
3639 rtx basereg;
3640 int displacement;
3641 tree context = decl_function_context (var);
3642 struct function *fp;
3643 rtx base = 0;
3644
3645 /* If this is the present function, we need not do anything. */
3646 if (context == current_function_decl || context == inline_function_decl)
3647 return addr;
3648
3649 for (fp = outer_function_chain; fp; fp = fp->next)
3650 if (fp->decl == context)
3651 break;
3652
3653 if (fp == 0)
3654 abort ();
3655
3656 /* Decode given address as base reg plus displacement. */
3657 if (GET_CODE (addr) == REG)
3658 basereg = addr, displacement = 0;
3659 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3660 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3661 else
3662 abort ();
3663
3664 /* We accept vars reached via the containing function's
3665 incoming arg pointer and via its stack variables pointer. */
3666 if (basereg == fp->internal_arg_pointer)
3667 {
3668 /* If reached via arg pointer, get the arg pointer value
3669 out of that function's stack frame.
3670
3671 There are two cases: If a separate ap is needed, allocate a
3672 slot in the outer function for it and dereference it that way.
3673 This is correct even if the real ap is actually a pseudo.
3674 Otherwise, just adjust the offset from the frame pointer to
3675 compensate. */
3676
3677 #ifdef NEED_SEPARATE_AP
3678 rtx addr;
3679
3680 if (fp->arg_pointer_save_area == 0)
3681 fp->arg_pointer_save_area
3682 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3683
3684 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3685 addr = memory_address (Pmode, addr);
3686
3687 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3688 #else
3689 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3690 base = lookup_static_chain (var);
3691 #endif
3692 }
3693
3694 else if (basereg == virtual_stack_vars_rtx)
3695 {
3696 /* This is the same code as lookup_static_chain, duplicated here to
3697 avoid an extra call to decl_function_context. */
3698 tree link;
3699
3700 for (link = context_display; link; link = TREE_CHAIN (link))
3701 if (TREE_PURPOSE (link) == context)
3702 {
3703 base = RTL_EXPR_RTL (TREE_VALUE (link));
3704 break;
3705 }
3706 }
3707
3708 if (base == 0)
3709 abort ();
3710
3711 /* Use same offset, relative to appropriate static chain or argument
3712 pointer. */
3713 return plus_constant (base, displacement);
3714 }
3715 \f
3716 /* Return the address of the trampoline for entering nested fn FUNCTION.
3717 If necessary, allocate a trampoline (in the stack frame)
3718 and emit rtl to initialize its contents (at entry to this function). */
3719
3720 rtx
3721 trampoline_address (function)
3722 tree function;
3723 {
3724 tree link;
3725 tree rtlexp;
3726 rtx tramp;
3727 struct function *fp;
3728 tree fn_context;
3729
3730 /* Find an existing trampoline and return it. */
3731 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3732 if (TREE_PURPOSE (link) == function)
3733 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3734 for (fp = outer_function_chain; fp; fp = fp->next)
3735 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3736 if (TREE_PURPOSE (link) == function)
3737 {
3738 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3739 function);
3740 return round_trampoline_addr (tramp);
3741 }
3742
3743 /* None exists; we must make one. */
3744
3745 /* Find the `struct function' for the function containing FUNCTION. */
3746 fp = 0;
3747 fn_context = decl_function_context (function);
3748 if (fn_context != current_function_decl)
3749 for (fp = outer_function_chain; fp; fp = fp->next)
3750 if (fp->decl == fn_context)
3751 break;
3752
3753 /* Allocate run-time space for this trampoline
3754 (usually in the defining function's stack frame). */
3755 #ifdef ALLOCATE_TRAMPOLINE
3756 tramp = ALLOCATE_TRAMPOLINE (fp);
3757 #else
3758 /* If rounding needed, allocate extra space
3759 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3760 #ifdef TRAMPOLINE_ALIGNMENT
3761 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3762 #else
3763 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3764 #endif
3765 if (fp != 0)
3766 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3767 else
3768 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3769 #endif
3770
3771 /* Record the trampoline for reuse and note it for later initialization
3772 by expand_function_end. */
3773 if (fp != 0)
3774 {
3775 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3776 rtlexp = make_node (RTL_EXPR);
3777 RTL_EXPR_RTL (rtlexp) = tramp;
3778 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3779 pop_obstacks ();
3780 }
3781 else
3782 {
3783 /* Make the RTL_EXPR node temporary, not momentary, so that the
3784 trampoline_list doesn't become garbage. */
3785 int momentary = suspend_momentary ();
3786 rtlexp = make_node (RTL_EXPR);
3787 resume_momentary (momentary);
3788
3789 RTL_EXPR_RTL (rtlexp) = tramp;
3790 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3791 }
3792
3793 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3794 return round_trampoline_addr (tramp);
3795 }
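/* A sketch of when a trampoline is required: taking the bare address of a
   nested function.  qsort receives only a code pointer, so the static
   chain that lets `cmp' reach `bias' must be baked into a small
   runtime-built stub -- the trampoline allocated here.  Standalone GNU C,
   not compiled as part of function.c.  */
#if 0
#include <stdlib.h>

void
sort_biased (base, n)
     int *base;
     int n;
{
  int bias = 10;
  int cmp (const void *a, const void *b)
    { return (*(const int *) a + bias) - (*(const int *) b + bias); }
  qsort (base, (size_t) n, sizeof *base, cmp);	/* &cmp => trampoline */
}
#endif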
3796
3797 /* Given a trampoline address,
3798 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
3799
3800 static rtx
3801 round_trampoline_addr (tramp)
3802 rtx tramp;
3803 {
3804 #ifdef TRAMPOLINE_ALIGNMENT
3805 /* Round address up to desired boundary. */
3806 rtx temp = gen_reg_rtx (Pmode);
3807 temp = expand_binop (Pmode, add_optab, tramp,
3808 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3809 temp, 0, OPTAB_LIB_WIDEN);
3810 tramp = expand_binop (Pmode, and_optab, temp,
3811 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3812 temp, 0, OPTAB_LIB_WIDEN);
3813 #endif
3814 return tramp;
3815 }
3816 \f
3817 /* The functions identify_blocks and reorder_blocks provide a way to
3818 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3819 duplicate portions of the RTL code. Call identify_blocks before
3820 changing the RTL, and call reorder_blocks after. */
3821
3822 static int all_blocks ();
3823 static tree blocks_nreverse ();
3824
3825 /* Put all this function's BLOCK nodes into a vector, and return it.
3826 Also store in each NOTE for the beginning or end of a block
3827 the vector index of the block that is current just after that note.
3828 The arguments are TOP_BLOCK, the top-level block of the function,
3829 and INSNS, the insn chain of the function. */
3830
3831 tree *
3832 identify_blocks (top_block, insns)
3833 tree top_block;
3834 rtx insns;
3835 {
3836 int n_blocks;
3837 tree *block_vector;
3838 int *block_stack;
3839 int depth = 0;
3840 int next_block_number = 0;
3841 int current_block_number = 0;
3842 rtx insn;
3843
3844 if (top_block == 0)
3845 return 0;
3846
3847 n_blocks = all_blocks (top_block, 0);
3848 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3849 block_stack = (int *) alloca (n_blocks * sizeof (int));
3850
3851 all_blocks (top_block, block_vector);
3852
3853 for (insn = insns; insn; insn = NEXT_INSN (insn))
3854 if (GET_CODE (insn) == NOTE)
3855 {
3856 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3857 {
3858 block_stack[depth++] = current_block_number;
3859 current_block_number = next_block_number;
3860 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
3861 }
3862 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3863 {
3864 current_block_number = block_stack[--depth];
3865 NOTE_BLOCK_NUMBER (insn) = current_block_number;
3866 }
3867 }
3868
3869 return block_vector;
3870 }
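/* A hedged standalone sketch of the numbering above, using nested
   parentheses in place of the BEG/END notes: each opener records a fresh
   index, and each closer records the index of the block re-entered when
   it pops the stack -- exactly the assignments made to NOTE_BLOCK_NUMBER.
   Not compiled as part of function.c.  */
#if 0
#include <stdio.h>

int
main ()
{
  char *s = "(()())";
  int stack[8], depth = 0, next = 0, current = 0;
  char *p;

  for (p = s; *p; p++)
    if (*p == '(')
      {
	stack[depth++] = current;
	current = next;
	printf ("BEG %d\n", next++);
      }
    else
      {
	current = stack[--depth];
	printf ("END %d\n", current);
      }
  /* Prints: BEG 0, BEG 1, END 0, BEG 2, END 0, END 0.  */
  return 0;
}
#endif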
3871
3872 /* Given BLOCK_VECTOR which was returned by identify_blocks,
3873 and a revised instruction chain, rebuild the tree structure
3874 of BLOCK nodes to correspond to the new order of RTL.
3875 The new block tree is inserted below TOP_BLOCK.
3876 Returns the current top-level block. */
3877
3878 tree
3879 reorder_blocks (block_vector, top_block, insns)
3880 tree *block_vector;
3881 tree top_block;
3882 rtx insns;
3883 {
3884 tree current_block = top_block;
3885 rtx insn;
3886
3887 if (block_vector == 0)
3888 return top_block;
3889
3890 /* Prune the old tree away, so that it doesn't get in the way. */
3891 BLOCK_SUBBLOCKS (current_block) = 0;
3892
3893 for (insn = insns; insn; insn = NEXT_INSN (insn))
3894 if (GET_CODE (insn) == NOTE)
3895 {
3896 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3897 {
3898 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
3899 /* If we have seen this block before, copy it. */
3900 if (TREE_ASM_WRITTEN (block))
3901 block = copy_node (block);
3902 BLOCK_SUBBLOCKS (block) = 0;
3903 TREE_ASM_WRITTEN (block) = 1;
3904 BLOCK_SUPERCONTEXT (block) = current_block;
3905 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3906 BLOCK_SUBBLOCKS (current_block) = block;
3907 current_block = block;
3908 NOTE_SOURCE_FILE (insn) = 0;
3909 }
3910 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3911 {
3912 BLOCK_SUBBLOCKS (current_block)
3913 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3914 current_block = BLOCK_SUPERCONTEXT (current_block);
3915 NOTE_SOURCE_FILE (insn) = 0;
3916 }
3917 }
3918
3919 return current_block;
3920 }
3921
3922 /* Reverse the order of elements in the chain T of blocks,
3923 and return the new head of the chain (old last element). */
3924
3925 static tree
3926 blocks_nreverse (t)
3927 tree t;
3928 {
3929 register tree prev = 0, decl, next;
3930 for (decl = t; decl; decl = next)
3931 {
3932 next = BLOCK_CHAIN (decl);
3933 BLOCK_CHAIN (decl) = prev;
3934 prev = decl;
3935 }
3936 return prev;
3937 }
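/* The same in-place reversal, sketched on a hypothetical ordinary
   singly-linked list rather than BLOCK nodes.  Standalone, not compiled
   as part of function.c.  */
#if 0
struct node { struct node *next; };

static struct node *
nreverse_example (t)
     struct node *t;
{
  struct node *prev = 0, *this, *next;
  for (this = t; this; this = next)
    {
      next = this->next;	/* save the rest of the chain */
      this->next = prev;	/* point this node backwards */
      prev = this;		/* advance the reversed head */
    }
  return prev;			/* old last element is the new head */
}
#endif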
3938
3939 /* Count BLOCK and all its subblocks, and list them all into the
3940 vector VECTOR. Also clear TREE_ASM_WRITTEN in all blocks. */
3941
3942 static int
3943 all_blocks (block, vector)
3944 tree block;
3945 tree *vector;
3946 {
3947 int n_blocks = 1;
3948 tree subblocks;
3949
3950 TREE_ASM_WRITTEN (block) = 0;
3951 /* Record this block. */
3952 if (vector)
3953 vector[0] = block;
3954
3955 /* Record the subblocks, and their subblocks. */
3956 for (subblocks = BLOCK_SUBBLOCKS (block);
3957 subblocks; subblocks = BLOCK_CHAIN (subblocks))
3958 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
3959
3960 return n_blocks;
3961 }
3962 \f
3963 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3964 and initialize static variables for generating RTL for the statements
3965 of the function. */
3966
3967 void
3968 init_function_start (subr, filename, line)
3969 tree subr;
3970 char *filename;
3971 int line;
3972 {
3973 char *junk;
3974
3975 init_stmt_for_function ();
3976
3977 cse_not_expected = ! optimize;
3978
3979 /* Caller save not needed yet. */
3980 caller_save_needed = 0;
3981
3982 /* No stack slots have been made yet. */
3983 stack_slot_list = 0;
3984
3985 /* There is no stack slot for handling nonlocal gotos. */
3986 nonlocal_goto_handler_slot = 0;
3987 nonlocal_goto_stack_level = 0;
3988
3989 /* No labels have been declared for nonlocal use. */
3990 nonlocal_labels = 0;
3991
3992 /* No function calls so far in this function. */
3993 function_call_count = 0;
3994
3995 /* No parm regs have been allocated.
3996 (This is important for output_inline_function.) */
3997 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3998
3999 /* Initialize the RTL mechanism. */
4000 init_emit ();
4001
4002 /* Initialize the queue of pending postincrement and postdecrements,
4003 and some other info in expr.c. */
4004 init_expr ();
4005
4006 /* We haven't done register allocation yet. */
4007 reg_renumber = 0;
4008
4009 init_const_rtx_hash_table ();
4010
4011 current_function_name = (*decl_printable_name) (subr, &junk);
4012
4013 /* Nonzero if this is a nested function that uses a static chain. */
4014
4015 current_function_needs_context
4016 = (decl_function_context (current_function_decl) != 0);
4017
4018 /* Set if a call to setjmp is seen. */
4019 current_function_calls_setjmp = 0;
4020
4021 /* Set if a call to longjmp is seen. */
4022 current_function_calls_longjmp = 0;
4023
4024 current_function_calls_alloca = 0;
4025 current_function_has_nonlocal_label = 0;
4026 current_function_contains_functions = 0;
4027
4028 current_function_returns_pcc_struct = 0;
4029 current_function_returns_struct = 0;
4030 current_function_epilogue_delay_list = 0;
4031 current_function_uses_const_pool = 0;
4032 current_function_uses_pic_offset_table = 0;
4033
4034 /* We have not yet needed to make a label to jump to for tail-recursion. */
4035 tail_recursion_label = 0;
4036
4037 /* We haven't had a need to make a save area for ap yet. */
4038
4039 arg_pointer_save_area = 0;
4040
4041 /* No stack slots allocated yet. */
4042 frame_offset = 0;
4043
4044 /* No SAVE_EXPRs in this function yet. */
4045 save_expr_regs = 0;
4046
4047 /* No RTL_EXPRs in this function yet. */
4048 rtl_expr_chain = 0;
4049
4050 /* We have not allocated any temporaries yet. */
4051 temp_slots = 0;
4052 temp_slot_level = 0;
4053
4054 /* Within function body, compute a type's size as soon as it is laid out. */
4055 immediate_size_expand++;
4056
4057 init_pending_stack_adjust ();
4058 inhibit_defer_pop = 0;
4059
4060 current_function_outgoing_args_size = 0;
4061
4062 /* Initialize the insn lengths. */
4063 init_insn_lengths ();
4064
4065 /* Prevent ever trying to delete the first instruction of a function.
4066 Also tell final how to output a linenum before the function prologue. */
4067 emit_line_note (filename, line);
4068
4069 /* Make sure first insn is a note even if we don't want linenums.
4070 This makes sure the first insn will never be deleted.
4071 Also, final expects a note to appear there. */
4072 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4073
4074 /* Set flags used by final.c. */
4075 if (aggregate_value_p (DECL_RESULT (subr)))
4076 {
4077 #ifdef PCC_STATIC_STRUCT_RETURN
4078 if (flag_pcc_struct_return)
4079 current_function_returns_pcc_struct = 1;
4080 else
4081 #endif
4082 current_function_returns_struct = 1;
4083 }
4084
4085 /* Warn if this value is an aggregate type,
4086 regardless of which calling convention we are using for it. */
4087 if (warn_aggregate_return
4088 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
4089 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
4090 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
4091 warning ("function returns an aggregate");
4092
4093 current_function_returns_pointer
4094 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
4095
4096 /* Indicate that we need to distinguish between the return value of the
4097 present function and the return value of a function being called. */
4098 rtx_equal_function_value_matters = 1;
4099
4100 /* Indicate that we have not instantiated virtual registers yet. */
4101 virtuals_instantiated = 0;
4102
4103 /* Indicate we have no need of a frame pointer yet. */
4104 frame_pointer_needed = 0;
4105
4106 /* By default assume not varargs. */
4107 current_function_varargs = 0;
4108 }
4109
4110 /* Indicate that the current function uses extra args
4111 not explicitly mentioned in the argument list in any fashion. */
4112
4113 void
4114 mark_varargs ()
4115 {
4116 current_function_varargs = 1;
4117 }
4118
4119 /* Expand a call to __main at the beginning of a possible main function. */
4120
4121 void
4122 expand_main_function ()
4123 {
4124 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
4125 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
4126 VOIDmode, 0);
4127 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
4128 }
4129 \f
4130 /* Start the RTL for a new function, and set variables used for
4131 emitting RTL.
4132 SUBR is the FUNCTION_DECL node.
4133 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4134 the function's parameters, which must be run at any return statement. */
4135
4136 void
4137 expand_function_start (subr, parms_have_cleanups)
4138 tree subr;
4139 int parms_have_cleanups;
4140 {
4141 register int i;
4142 tree tem;
4143 rtx last_ptr;
4144
4145 /* Make sure volatile mem refs aren't considered
4146 valid operands of arithmetic insns. */
4147 init_recog_no_volatile ();
4148
4149 /* If function gets a static chain arg, store it in the stack frame.
4150 Do this first, so it gets the first stack slot offset. */
4151 if (current_function_needs_context)
4152 {
4153 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4154 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4155 }
4156
4157 /* If the parameters of this function need cleaning up, get a label
4158 for the beginning of the code which executes those cleanups. This must
4159 be done before doing anything with return_label. */
4160 if (parms_have_cleanups)
4161 cleanup_label = gen_label_rtx ();
4162 else
4163 cleanup_label = 0;
4164
4165 /* Make the label for return statements to jump to, if this machine
4166 does not have a one-instruction return and uses an epilogue,
4167 or if it returns a structure, or if it has parm cleanups. */
4168 #ifdef HAVE_return
4169 if (cleanup_label == 0 && HAVE_return
4170 && ! current_function_returns_pcc_struct
4171 && ! (current_function_returns_struct && ! optimize))
4172 return_label = 0;
4173 else
4174 return_label = gen_label_rtx ();
4175 #else
4176 return_label = gen_label_rtx ();
4177 #endif
4178
4179 /* Initialize rtx used to return the value. */
4180 /* Do this before assign_parms so that we copy the struct value address
4181 before any library calls that assign parms might generate. */
4182
4183 /* Decide whether to return the value in memory or in a register. */
4184 if (aggregate_value_p (DECL_RESULT (subr)))
4185 {
4186 /* Returning something that won't go in a register. */
4187 register rtx value_address = 0;
4188
4189 #ifdef PCC_STATIC_STRUCT_RETURN
4190 if (current_function_returns_pcc_struct)
4191 {
4192 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4193 value_address = assemble_static_space (size);
4194 }
4195 else
4196 #endif
4197 {
4198 /* Expect to be passed the address of a place to store the value.
4199 If it is passed as an argument, assign_parms will take care of
4200 it. */
4201 if (struct_value_incoming_rtx)
4202 {
4203 value_address = gen_reg_rtx (Pmode);
4204 emit_move_insn (value_address, struct_value_incoming_rtx);
4205 }
4206 }
4207 if (value_address)
4208 DECL_RTL (DECL_RESULT (subr))
4209 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4210 value_address);
4211 }
4212 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4213 /* If return mode is void, this decl rtl should not be used. */
4214 DECL_RTL (DECL_RESULT (subr)) = 0;
4215 else if (parms_have_cleanups)
4216 {
4217 /* If function will end with cleanup code for parms,
4218 compute the return value into a pseudo reg,
4219 which we will copy into the true return register
4220 after the cleanups are done. */
4221
4222 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
4223 #ifdef PROMOTE_FUNCTION_RETURN
4224 tree type = TREE_TYPE (DECL_RESULT (subr));
4225 int unsignedp = TREE_UNSIGNED (type);
4226
4227 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
4228 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
4229 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
4230 || TREE_CODE (type) == OFFSET_TYPE)
4231 {
4232 PROMOTE_MODE (mode, unsignedp, type);
4233 }
4234 #endif
4235
4236 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
4237 }
4238 else
4239 /* Scalar, returned in a register. */
4240 {
4241 #ifdef FUNCTION_OUTGOING_VALUE
4242 DECL_RTL (DECL_RESULT (subr))
4243 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4244 #else
4245 DECL_RTL (DECL_RESULT (subr))
4246 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4247 #endif
4248
4249 /* Mark this reg as the function's return value. */
4250 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4251 {
4252 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4253 /* Needed because we may need to move this to memory
4254 in case it's a named return value whose address is taken. */
4255 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4256 }
4257 }
4258
4259 /* Initialize rtx for parameters and local variables.
4260 In some cases this requires emitting insns. */
4261
4262 assign_parms (subr, 0);
4263
4264 /* The following was moved from init_function_start.
4265 The move is supposed to make sdb output more accurate. */
4266 /* Indicate the beginning of the function body,
4267 as opposed to parm setup. */
4268 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4269
4270 /* If doing stupid allocation, mark parms as born here. */
4271
4272 if (GET_CODE (get_last_insn ()) != NOTE)
4273 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4274 parm_birth_insn = get_last_insn ();
4275
4276 if (obey_regdecls)
4277 {
4278 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4279 use_variable (regno_reg_rtx[i]);
4280
4281 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4282 use_variable (current_function_internal_arg_pointer);
4283 }
4284
4285 /* Fetch static chain values for containing functions. */
4286 tem = decl_function_context (current_function_decl);
4287 /* If not doing stupid register allocation, then start off with the static
4288 chain pointer in a pseudo register. Otherwise, we use the stack
4289 address that was generated above. */
4290 if (tem && ! obey_regdecls)
4291 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4292 context_display = 0;
4293 while (tem)
4294 {
4295 tree rtlexp = make_node (RTL_EXPR);
4296
4297 RTL_EXPR_RTL (rtlexp) = last_ptr;
4298 context_display = tree_cons (tem, rtlexp, context_display);
4299 tem = decl_function_context (tem);
4300 if (tem == 0)
4301 break;
4302 /* Chain thru stack frames, assuming pointer to next lexical frame
4303 is found at the place we always store it. */
4304 #ifdef FRAME_GROWS_DOWNWARD
4305 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4306 #endif
4307 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4308 memory_address (Pmode, last_ptr)));
4309 }
4310
4311 /* After the display initializations is where the tail-recursion label
4312 should go, if we end up needing one. Ensure we have a NOTE here
4313 since some things (like trampolines) get placed before this. */
4314 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4315
4316 /* Evaluate now the sizes of any types declared among the arguments. */
4317 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4318 expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
4319
4320 /* Make sure there is a line number after the function entry setup code. */
4321 force_next_line_note ();
4322 }
4323 \f
4324 /* Generate RTL for the end of the current function.
4325 FILENAME and LINE are the current position in the source file. */
4326
4327 /* It is up to language-specific callers to do cleanups for parameters. */
4328
4329 void
4330 expand_function_end (filename, line)
4331 char *filename;
4332 int line;
4333 {
4334 register int i;
4335 tree link;
4336
4337 static rtx initial_trampoline;
4338
4339 #ifdef NON_SAVING_SETJMP
4340 /* Don't put any variables in registers if we call setjmp
4341 on a machine that fails to restore the registers. */
4342 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4343 {
4344 setjmp_protect (DECL_INITIAL (current_function_decl));
4345 setjmp_protect_args ();
4346 }
4347 #endif
4348
4349 /* Save the argument pointer if a save area was made for it. */
4350 if (arg_pointer_save_area)
4351 {
4352 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4353 emit_insn_before (x, tail_recursion_reentry);
4354 }
4355
4356 /* Initialize any trampolines required by this function. */
4357 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4358 {
4359 tree function = TREE_PURPOSE (link);
4360 rtx context = lookup_static_chain (function);
4361 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4362 rtx seq;
4363
4364 /* First make sure this compilation has a template for
4365 initializing trampolines. */
4366 if (initial_trampoline == 0)
4367 {
4368 end_temporary_allocation ();
4369 initial_trampoline
4370 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4371 resume_temporary_allocation ();
4372 }
4373
4374 /* Generate insns to initialize the trampoline. */
4375 start_sequence ();
4376 tramp = change_address (initial_trampoline, BLKmode,
4377 round_trampoline_addr (XEXP (tramp, 0)));
4378 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4379 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4380 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4381 XEXP (DECL_RTL (function), 0), context);
4382 seq = get_insns ();
4383 end_sequence ();
4384
4385 /* Put those insns at entry to the containing function (this one). */
4386 emit_insns_before (seq, tail_recursion_reentry);
4387 }
4388 /* Clear the trampoline_list for the next function. */
4389 trampoline_list = 0;
4390
4391 #if 0 /* I think unused parms are legitimate enough. */
4392 /* Warn about unused parms. */
4393 if (warn_unused)
4394 {
4395 rtx decl;
4396
4397 for (decl = DECL_ARGUMENTS (current_function_decl);
4398 decl; decl = TREE_CHAIN (decl))
4399 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4400 warning_with_decl (decl, "unused parameter `%s'");
4401 }
4402 #endif
4403
4404 /* Delete handlers for nonlocal gotos if nothing uses them. */
4405 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4406 delete_handlers ();
4407
4408 /* End any sequences that failed to be closed due to syntax errors. */
4409 while (in_sequence_p ())
4410 end_sequence ();
4411
4412 /* Outside function body, can't compute type's actual size
4413 until next function's body starts. */
4414 immediate_size_expand--;
4415
4416 /* If doing stupid register allocation,
4417 mark register parms as dying here. */
4418
4419 if (obey_regdecls)
4420 {
4421 rtx tem;
4422 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4423 use_variable (regno_reg_rtx[i]);
4424
4425 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4426
4427 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4428 {
4429 use_variable (XEXP (tem, 0));
4430 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4431 }
4432
4433 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4434 use_variable (current_function_internal_arg_pointer);
4435 }
4436
4437 clear_pending_stack_adjust ();
4438 do_pending_stack_adjust ();
4439
4440 /* Mark the end of the function body.
4441 If control reaches this insn, the function can drop through
4442 without returning a value. */
4443 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4444
4445 /* Output a linenumber for the end of the function.
4446 SDB depends on this. */
4447 emit_line_note_force (filename, line);
4448
4449 /* Output the label for the actual return from the function,
4450 if one is expected. This happens either because a function epilogue
4451 is used instead of a return instruction, or because a return was done
4452 with a goto in order to run local cleanups, or because of pcc-style
4453 structure returning. */
4454
4455 if (return_label)
4456 emit_label (return_label);
4457
4458 /* If we had calls to alloca, and this machine needs
4459 an accurate stack pointer to exit the function,
4460 insert some code to save and restore the stack pointer. */
4461 #ifdef EXIT_IGNORE_STACK
4462 if (! EXIT_IGNORE_STACK)
4463 #endif
4464 if (current_function_calls_alloca)
4465 {
4466 rtx tem = 0;
4467
4468 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4469 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4470 }
4471
4472 /* If scalar return value was computed in a pseudo-reg,
4473 copy that to the hard return register. */
4474 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4475 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4476 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4477 >= FIRST_PSEUDO_REGISTER))
4478 {
4479 rtx real_decl_result;
4480
4481 #ifdef FUNCTION_OUTGOING_VALUE
4482 real_decl_result
4483 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4484 current_function_decl);
4485 #else
4486 real_decl_result
4487 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4488 current_function_decl);
4489 #endif
4490 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4491 emit_move_insn (real_decl_result,
4492 DECL_RTL (DECL_RESULT (current_function_decl)));
4493 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4494 }
4495
4496 /* If returning a structure, arrange to return the address of the value
4497 in a place where debuggers expect to find it.
4498
4499 If returning a structure PCC style,
4500 the caller also depends on this value.
4501 And current_function_returns_pcc_struct is not necessarily set. */
4502 if (current_function_returns_struct
4503 || current_function_returns_pcc_struct)
4504 {
4505 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4506 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4507 #ifdef FUNCTION_OUTGOING_VALUE
4508 rtx outgoing
4509 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4510 current_function_decl);
4511 #else
4512 rtx outgoing
4513 = FUNCTION_VALUE (build_pointer_type (type),
4514 current_function_decl);
4515 #endif
4516
4517 /* Mark this as a function return value so integrate will delete the
4518 assignment and USE below when inlining this function. */
4519 REG_FUNCTION_VALUE_P (outgoing) = 1;
4520
4521 emit_move_insn (outgoing, value_address);
4522 use_variable (outgoing);
4523 }
4524
4525 /* Output a return insn if we are using one.
4526 Otherwise, let the rtl chain end here, to drop through
4527 into the epilogue. */
4528
4529 #ifdef HAVE_return
4530 if (HAVE_return)
4531 {
4532 emit_jump_insn (gen_return ());
4533 emit_barrier ();
4534 }
4535 #endif
4536
4537 /* Fix up any gotos that jumped out to the outermost
4538 binding level of the function.
4539 Must follow emitting RETURN_LABEL. */
4540
4541 /* If you have any cleanups to do at this point,
4542 and they need to create temporary variables,
4543 then you will lose. */
4544 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4545 }
4546 \f
4547 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4548
4549 static int *prologue;
4550 static int *epilogue;
4551
4552 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
4553 or a single insn). */
4554
4555 static int *
4556 record_insns (insns)
4557 rtx insns;
4558 {
4559 int *vec;
4560
4561 if (GET_CODE (insns) == SEQUENCE)
4562 {
4563 int len = XVECLEN (insns, 0);
4564 vec = (int *) oballoc ((len + 1) * sizeof (int));
4565 vec[len] = 0;
4566 while (--len >= 0)
4567 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4568 }
4569 else
4570 {
4571 vec = (int *) oballoc (2 * sizeof (int));
4572 vec[0] = INSN_UID (insns);
4573 vec[1] = 0;
4574 }
4575 return vec;
4576 }
4577
4578 /* Determine how many INSN_UIDs in VEC are part of INSN. */
4579
4580 static int
4581 contains (insn, vec)
4582 rtx insn;
4583 int *vec;
4584 {
4585 register int i, j;
4586
4587 if (GET_CODE (insn) == INSN
4588 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4589 {
4590 int count = 0;
4591 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4592 for (j = 0; vec[j]; j++)
4593 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4594 count++;
4595 return count;
4596 }
4597 else
4598 {
4599 for (j = 0; vec[j]; j++)
4600 if (INSN_UID (insn) == vec[j])
4601 return 1;
4602 }
4603 return 0;
4604 }
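/* A sketch of the convention shared by record_insns and contains: a 0
   entry terminates the vector (no real insn is recorded as UID 0 here),
   and membership is a linear scan.  Standalone, not compiled as part of
   function.c.  */
#if 0
static int
uid_in_vec (uid, vec)
     int uid;
     int *vec;			/* terminated by a 0 entry */
{
  int j;
  for (j = 0; vec[j]; j++)
    if (uid == vec[j])
      return 1;
  return 0;
}
#endif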
4605
4606 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4607 this into place with notes indicating where the prologue ends and where
4608 the epilogue begins. Update the basic block information when possible. */
4609
4610 void
4611 thread_prologue_and_epilogue_insns (f)
4612 rtx f;
4613 {
4614 #ifdef HAVE_prologue
4615 if (HAVE_prologue)
4616 {
4617 rtx head, seq;
4618
4619 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4620 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4621 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4622 seq = gen_prologue ();
4623 head = emit_insn_after (seq, f);
4624
4625 /* Include the new prologue insns in the first block. Ignore them
4626 if they form a basic block unto themselves. */
4627 if (basic_block_head && n_basic_blocks
4628 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4629 basic_block_head[0] = NEXT_INSN (f);
4630
4631 /* Retain a map of the prologue insns. */
4632 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4633 }
4634 else
4635 #endif
4636 prologue = 0;
4637
4638 #ifdef HAVE_epilogue
4639 if (HAVE_epilogue)
4640 {
4641 rtx insn = get_last_insn ();
4642 rtx prev = prev_nonnote_insn (insn);
4643
4644 /* If we end with a BARRIER, we don't need an epilogue. */
4645 if (! (prev && GET_CODE (prev) == BARRIER))
4646 {
4647 rtx tail, seq;
4648
4649 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4650 the epilogue insns (this must include the jump insn that
4651 returns), USE insns at the end of a function, and a BARRIER. */
4652
4653 emit_barrier_after (insn);
4654
4655 /* Place the epilogue before the USE insns at the end of a
4656 function. */
4657 while (prev
4658 && GET_CODE (prev) == INSN
4659 && GET_CODE (PATTERN (prev)) == USE)
4660 {
4661 insn = PREV_INSN (prev);
4662 prev = prev_nonnote_insn (prev);
4663 }
4664
4665 seq = gen_epilogue ();
4666 tail = emit_jump_insn_after (seq, insn);
4667 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4668
4669 /* Include the new epilogue insns in the last block. Ignore
4670 them if they form a basic block unto themselves. */
4671 if (basic_block_end && n_basic_blocks
4672 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4673 basic_block_end[n_basic_blocks - 1] = tail;
4674
4675 /* Retain a map of the epilogue insns. */
4676 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4677 return;
4678 }
4679 }
4680 #endif
4681 epilogue = 0;
4682 }
4683
4684 /* Reposition the prologue-end and epilogue-begin notes after instruction
4685 scheduling and delayed branch scheduling. */
4686
4687 void
4688 reposition_prologue_and_epilogue_notes (f)
4689 rtx f;
4690 {
4691 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
4692 /* Reposition the prologue and epilogue notes. */
4693 if (n_basic_blocks)
4694 {
4695 rtx next, prev;
4696 int len;
4697
4698 if (prologue)
4699 {
4700 register rtx insn, note = 0;
4701
4702 /* Scan from the beginning until we reach the last prologue insn.
4703 We apparently can't depend on basic_block_{head,end} after
4704 reorg has run. */
4705 for (len = 0; prologue[len]; len++)
4706 ;
4707 for (insn = f; insn; insn = NEXT_INSN (insn))
4708 if (GET_CODE (insn) == NOTE)
4709 {
4710 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4711 note = insn;
4712 }
4713 else if ((len -= contains (insn, prologue)) == 0)
4714 {
4715 /* Find the prologue-end note if we haven't already, and
4716 move it to just after the last prologue insn. */
4717 if (note == 0)
4718 for (note = insn; note = NEXT_INSN (note);)
4719 if (GET_CODE (note) == NOTE
4720 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4721 break;
4722 next = NEXT_INSN (note);
4723 prev = PREV_INSN (note);
4724 if (prev)
4725 NEXT_INSN (prev) = next;
4726 if (next)
4727 PREV_INSN (next) = prev;
4728 add_insn_after (note, insn);
4729 break;
4730 }
4731 }
4732
4733 if (epilogue)
4734 {
4735 register rtx insn, note = 0;
4736
4737 /* Scan from the end until we reach the first epilogue insn.
4738 We apparently can't depend on basic_block_{head,end} after
4739 reorg has run. */
4740 for (len = 0; epilogue[len]; len++)
4741 ;
4742 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
4743 if (GET_CODE (insn) == NOTE)
4744 {
4745 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4746 note = insn;
4747 }
4748 else if ((len -= contains (insn, epilogue)) == 0)
4749 {
4750 /* Find the epilogue-begin note if we haven't already, and
4751 move it to just before the first epilogue insn. */
4752 if (note == 0)
4753 for (note = insn; note = PREV_INSN (note);)
4754 if (GET_CODE (note) == NOTE
4755 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4756 break;
4757 next = NEXT_INSN (note);
4758 prev = PREV_INSN (note);
4759 if (prev)
4760 NEXT_INSN (prev) = next;
4761 if (next)
4762 PREV_INSN (next) = prev;
4763 add_insn_after (note, PREV_INSN (insn));
4764 break;
4765 }
4766 }
4767 }
4768 #endif /* HAVE_prologue or HAVE_epilogue */
4769 }