]> gcc.gnu.org Git - gcc.git/blob - gcc/function.c
*** empty log message ***
[gcc.git] / gcc / function.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 1988, 1989, 1991 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
25
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
29
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
34
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
39
40 #include "config.h"
41
42 #include <stdio.h>
43
44 #include "rtl.h"
45 #include "tree.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56
57 /* Round a value to the lowest integer less than it that is a multiple of
58 the required alignment. Avoid using division in case the value is
59 negative. Assume the alignment is a power of two. */
60 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
61
62 /* Similar, but round to the next highest integer that meets the
63 alignment. */
64 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
65
66 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
67 during rtl generation. If they are different register numbers, this is
68 always true. It may also be true if
69 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
70 generation. See fix_lexical_addr for details. */
71
72 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
73 #define NEED_SEPARATE_AP
74 #endif
75
76 /* Number of bytes of args popped by function being compiled on its return.
77 Zero if no bytes are to be popped.
78 May affect compilation of return insn or of function epilogue. */
79
80 int current_function_pops_args;
81
82 /* Nonzero if function being compiled needs to be given an address
83 where the value should be stored. */
84
85 int current_function_returns_struct;
86
87 /* Nonzero if function being compiled needs to
88 return the address of where it has put a structure value. */
89
90 int current_function_returns_pcc_struct;
91
92 /* Nonzero if function being compiled needs to be passed a static chain. */
93
94 int current_function_needs_context;
95
96 /* Nonzero if function being compiled can call setjmp. */
97
98 int current_function_calls_setjmp;
99
100 /* Nonzero if function being compiled can call longjmp. */
101
102 int current_function_calls_longjmp;
103
104 /* Nonzero if function being compiled receives nonlocal gotos
105 from nested functions. */
106
107 int current_function_has_nonlocal_label;
108
109 /* Nonzero if function being compiled contains nested functions. */
110
111 int current_function_contains_functions;
112
113 /* Nonzero if function being compiled can call alloca,
114 either as a subroutine or builtin. */
115
116 int current_function_calls_alloca;
117
118 /* Nonzero if the current function returns a pointer type */
119
120 int current_function_returns_pointer;
121
122 /* If some insns can be deferred to the delay slots of the epilogue, the
123 delay list for them is recorded here. */
124
125 rtx current_function_epilogue_delay_list;
126
127 /* If function's args have a fixed size, this is that size, in bytes.
128 Otherwise, it is -1.
129 May affect compilation of return insn or of function epilogue. */
130
131 int current_function_args_size;
132
133 /* # bytes the prologue should push and pretend that the caller pushed them.
134 The prologue must do this, but only if parms can be passed in registers. */
135
136 int current_function_pretend_args_size;
137
138 /* # of bytes of outgoing arguments required to be pushed by the prologue.
139 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
140 and no stack adjusts will be done on function calls. */
141
142 int current_function_outgoing_args_size;
143
144 /* This is the offset from the arg pointer to the place where the first
145 anonymous arg can be found, if there is one. */
146
147 rtx current_function_arg_offset_rtx;
148
149 /* Nonzero if current function uses varargs.h or equivalent.
150 Zero for functions that use stdarg.h. */
151
152 int current_function_varargs;
153
154 /* Quantities of various kinds of registers
155 used for the current function's args. */
156
157 CUMULATIVE_ARGS current_function_args_info;
158
159 /* Name of function now being compiled. */
160
161 char *current_function_name;
162
163 /* If non-zero, an RTL expression for that location at which the current
164 function returns its result. Always equal to
165 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
166 independently of the tree structures. */
167
168 rtx current_function_return_rtx;
169
170 /* Nonzero if the current function uses the constant pool. */
171
172 int current_function_uses_const_pool;
173
174 /* Nonzero if the current function uses pic_offset_table_rtx. */
175 int current_function_uses_pic_offset_table;
176
177 /* The arg pointer hard register, or the pseudo into which it was copied. */
178 rtx current_function_internal_arg_pointer;
179
180 /* The FUNCTION_DECL for an inline function currently being expanded. */
181 tree inline_function_decl;
182
183 /* Number of function calls seen so far in current function. */
184
185 int function_call_count;
186
187 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
188 (labels to which there can be nonlocal gotos from nested functions)
189 in this function. */
190
191 tree nonlocal_labels;
192
193 /* RTX for stack slot that holds the current handler for nonlocal gotos.
194 Zero when function does not have nonlocal labels. */
195
196 rtx nonlocal_goto_handler_slot;
197
198 /* RTX for stack slot that holds the stack pointer value to restore
199 for a nonlocal goto.
200 Zero when function does not have nonlocal labels. */
201
202 rtx nonlocal_goto_stack_level;
203
204 /* Label that will go on parm cleanup code, if any.
205 Jumping to this label runs cleanup code for parameters, if
206 such code must be run. Following this code is the logical return label. */
207
208 rtx cleanup_label;
209
210 /* Label that will go on function epilogue.
211 Jumping to this label serves as a "return" instruction
212 on machines which require execution of the epilogue on all returns. */
213
214 rtx return_label;
215
216 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
217 So we can mark them all live at the end of the function, if nonopt. */
218 rtx save_expr_regs;
219
220 /* List (chain of EXPR_LISTs) of all stack slots in this function.
221 Made for the sake of unshare_all_rtl. */
222 rtx stack_slot_list;
223
224 /* Chain of all RTL_EXPRs that have insns in them. */
225 tree rtl_expr_chain;
226
227 /* Label to jump back to for tail recursion, or 0 if we have
228 not yet needed one for this function. */
229 rtx tail_recursion_label;
230
231 /* Place after which to insert the tail_recursion_label if we need one. */
232 rtx tail_recursion_reentry;
233
234 /* Location at which to save the argument pointer if it will need to be
235 referenced. There are two cases where this is done: if nonlocal gotos
236 exist, or if vars stored at an offset from the argument pointer will be
237 needed by inner routines. */
238
239 rtx arg_pointer_save_area;
240
241 /* Offset to end of allocated area of stack frame.
242 If stack grows down, this is the address of the last stack slot allocated.
243 If stack grows up, this is the address for the next slot. */
244 int frame_offset;
245
246 /* List (chain of TREE_LISTs) of static chains for containing functions.
247 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
248 in an RTL_EXPR in the TREE_VALUE. */
249 static tree context_display;
250
251 /* List (chain of TREE_LISTs) of trampolines for nested functions.
252 The trampoline sets up the static chain and jumps to the function.
253 We supply the trampoline's address when the function's address is requested.
254
255 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
256 in an RTL_EXPR in the TREE_VALUE. */
257 static tree trampoline_list;
258
259 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
260 static rtx parm_birth_insn;
261
262 #if 0
263 /* Nonzero if a stack slot has been generated whose address is not
264 actually valid. It means that the generated rtl must all be scanned
265 to detect and correct the invalid addresses where they occur. */
266 static int invalid_stack_slot;
267 #endif
268
269 /* Last insn of those whose job was to put parms into their nominal homes. */
270 static rtx last_parm_insn;
271
272 /* 1 + last pseudo register number used for loading a copy
273 of a parameter of this function. */
274 static int max_parm_reg;
275
276 /* Vector indexed by REGNO, containing location on stack in which
277 to put the parm which is nominally in pseudo register REGNO,
278 if we discover that that parm must go in the stack. */
279 static rtx *parm_reg_stack_loc;
280
281 #if 0 /* Turned off because 0 seems to work just as well. */
282 /* Cleanup lists are required for binding levels regardless of whether
283 that binding level has cleanups or not. This node serves as the
284 cleanup list whenever an empty list is required. */
285 static tree empty_cleanup_list;
286 #endif
287
288 /* Nonzero once virtual register instantiation has been done.
289 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
290 static int virtuals_instantiated;
291
292 /* Nonzero if we need to distinguish between the return value of this function
293 and the return value of a function called by this function. This helps
294 integrate.c */
295
296 extern int rtx_equal_function_value_matters;
297
298 void fixup_gotos ();
299
300 static tree round_down ();
301 static rtx round_trampoline_addr ();
302 static rtx fixup_stack_1 ();
303 static void fixup_var_refs ();
304 static void fixup_var_refs_insns ();
305 static void fixup_var_refs_1 ();
306 static void optimize_bit_field ();
307 static void instantiate_decls ();
308 static void instantiate_decls_1 ();
309 static int instantiate_virtual_regs_1 ();
310 static rtx fixup_memory_subreg ();
311 static rtx walk_fixup_memory_subreg ();
312 \f
313 /* In order to evaluate some expressions, such as function calls returning
314 structures in memory, we need to temporarily allocate stack locations.
315 We record each allocated temporary in the following structure.
316
317 Associated with each temporary slot is a nesting level. When we pop up
318 one level, all temporaries associated with the previous level are freed.
319 Normally, all temporaries are freed after the execution of the statement
320 in which they were created. However, if we are inside a ({...}) grouping,
321 the result may be in a temporary and hence must be preserved. If the
322 result could be in a temporary, we preserve it if we can determine which
323 one it is in. If we cannot determine which temporary may contain the
324 result, all temporaries are preserved. A temporary is preserved by
325 pretending it was allocated at the previous nesting level.
326
327 Automatic variables are also assigned temporary slots, at the nesting
328 level where they are defined. They are marked a "kept" so that
329 free_temp_slots will not free them. */
330
/* One entry in the list of stack temporaries (see assign_stack_temp).
   Slots are never deallocated; an unused slot is marked not `in_use'
   and may later be reused for a request of a compatible mode/size.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx (a MEM) used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.
     Set for slots holding automatic variables of a block.  */
  int keep;
};
346
347 /* List of all temporaries allocated, both available and in use. */
348
349 struct temp_slot *temp_slots;
350
351 /* Current nesting level for temporaries. */
352
353 int temp_slot_level;
354 \f
355 /* Pointer to chain of `struct function' for containing functions. */
356 struct function *outer_function_chain;
357
358 /* Given a function decl for a containing function,
359 return the `struct function' for it. */
360
361 struct function *
362 find_function_data (decl)
363 tree decl;
364 {
365 struct function *p;
366 for (p = outer_function_chain; p; p = p->next)
367 if (p->decl == decl)
368 return p;
369 abort ();
370 }
371
/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  /* Freshly allocated record to hold the outer function's state while
     the nested function is compiled.  Freed by pop_function_context.  */
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  /* Link the record at the head of the chain of saved contexts.  */
  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Copy each `current_function_*' global, and the related static
     variables of this file, into the save record.  The assignments
     mirror pop_function_context, which restores them.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  /* No put_var_into_stack fixups are pending yet for this saved level;
     entries are queued by put_var_into_stack and drained when popping.  */
  p->fixup_var_refs_queue = 0;

  /* Let the other compiler modules save their own per-function state.
     init_emit resets the insn-emission machinery for the nested body.  */
  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
}
433
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  /* Unlink the record from the chain of saved contexts.  */
  outer_function_chain = p->next;

  /* Copy the saved state back into the `current_function_*' globals
     and the related static variables of this file, mirroring
     push_function_context.  */
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  /* The function we are returning to contains at least one nested
     function: the one whose compilation just finished.  */
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;

  /* Let the other compiler modules restore their per-function state.  */
  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
505 \f
506 /* Allocate fixed slots in the stack frame of the current function. */
507
508 /* Return size needed for stack frame based on slots so far allocated.
509 This size counts from zero. It is not rounded to STACK_BOUNDARY;
510 the caller may have to do that. */
511
512 int
513 get_frame_size ()
514 {
515 #ifdef FRAME_GROWS_DOWNWARD
516 return -frame_offset;
517 #else
518 return frame_offset;
519 #endif
520 }
521
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  /* Bytes to add so a narrow value lands in the low-order part of a
     wider slot on a big-endian target.  */
  int bigend_correction = 0;
  /* Required alignment of the slot, in bytes.  */
  int alignment;

  /* Translate the ALIGN argument (see contract above) into bytes.  */
  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

  /* Downward growth: move the offset first so it names the new slot.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

  /* Upward growth: the slot starts at the old offset; advance past it.  */
#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  /* Record the slot so unshare_all_rtl can find every stack slot.  */
  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
598
599 /* Assign a stack slot in a containing function.
600 First three arguments are same as in preceding function.
601 The last argument specifies the function to allocate in. */
602
603 rtx
604 assign_outer_stack_local (mode, size, align, function)
605 enum machine_mode mode;
606 int size;
607 int align;
608 struct function *function;
609 {
610 register rtx x, addr;
611 int bigend_correction = 0;
612 int alignment;
613
614 /* Allocate in the memory associated with the function in whose frame
615 we are assigning. */
616 push_obstacks (function->function_obstack,
617 function->function_maybepermanent_obstack);
618
619 if (align == 0)
620 {
621 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
622 if (mode == BLKmode)
623 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
624 }
625 else if (align == -1)
626 {
627 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
628 size = CEIL_ROUND (size, alignment);
629 }
630 else
631 alignment = align / BITS_PER_UNIT;
632
633 /* Round frame offset to that alignment. */
634 #ifdef FRAME_GROWS_DOWNWARD
635 frame_offset = FLOOR_ROUND (frame_offset, alignment);
636 #else
637 frame_offset = CEIL_ROUND (frame_offset, alignment);
638 #endif
639
640 /* On a big-endian machine, if we are allocating more space than we will use,
641 use the least significant bytes of those that are allocated. */
642 #if BYTES_BIG_ENDIAN
643 if (mode != BLKmode)
644 bigend_correction = size - GET_MODE_SIZE (mode);
645 #endif
646
647 #ifdef FRAME_GROWS_DOWNWARD
648 function->frame_offset -= size;
649 #endif
650 addr = plus_constant (virtual_stack_vars_rtx,
651 function->frame_offset + bigend_correction);
652 #ifndef FRAME_GROWS_DOWNWARD
653 function->frame_offset += size;
654 #endif
655
656 x = gen_rtx (MEM, mode, addr);
657
658 function->stack_slot_list
659 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
660
661 pop_obstacks ();
662
663 return x;
664 }
665 \f
666 /* Allocate a temporary stack slot and record it for possible later
667 reuse.
668
669 MODE is the machine mode to be given to the returned rtx.
670
671 SIZE is the size in units of the space required. We do no rounding here
672 since assign_stack_local will do any required rounding.
673
674 KEEP is non-zero if this slot is to be retained after a call to
675 free_temp_slots. Automatic variables for a block are allocated with this
676 flag. */
677
678 rtx
679 assign_stack_temp (mode, size, keep)
680 enum machine_mode mode;
681 int size;
682 int keep;
683 {
684 struct temp_slot *p, *best_p = 0;
685
686 /* First try to find an available, already-allocated temporary that is the
687 exact size we require. */
688 for (p = temp_slots; p; p = p->next)
689 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
690 break;
691
692 /* If we didn't find, one, try one that is larger than what we want. We
693 find the smallest such. */
694 if (p == 0)
695 for (p = temp_slots; p; p = p->next)
696 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
697 && (best_p == 0 || best_p->size > p->size))
698 best_p = p;
699
700 /* Make our best, if any, the one to use. */
701 if (best_p)
702 p = best_p;
703
704 /* If we still didn't find one, make a new temporary. */
705 if (p == 0)
706 {
707 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
708 p->size = size;
709 /* If the temp slot mode doesn't indicate the alignment,
710 use the largest possible, so no one will be disappointed. */
711 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
712 p->next = temp_slots;
713 temp_slots = p;
714 }
715
716 p->in_use = 1;
717 p->level = temp_slot_level;
718 p->keep = keep;
719 return p->slot;
720 }
721 \f
722 /* If X could be a reference to a temporary slot, mark that slot as belonging
723 to the to one level higher. If X matched one of our slots, just mark that
724 one. Otherwise, we can't easily predict which it is, so upgrade all of
725 them. Kept slots need not be touched.
726
727 This is called when an ({...}) construct occurs and a statement
728 returns a value in memory. */
729
730 void
731 preserve_temp_slots (x)
732 rtx x;
733 {
734 struct temp_slot *p;
735
736 /* If X is not in memory or is at a constant address, it cannot be in
737 a temporary slot. */
738 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
739 return;
740
741 /* First see if we can find a match. */
742 for (p = temp_slots; p; p = p->next)
743 if (p->in_use && x == p->slot)
744 {
745 p->level--;
746 return;
747 }
748
749 /* Otherwise, preserve all non-kept slots at this level. */
750 for (p = temp_slots; p; p = p->next)
751 if (p->in_use && p->level == temp_slot_level && ! p->keep)
752 p->level--;
753 }
754
755 /* Free all temporaries used so far. This is normally called at the end
756 of generating code for a statement. */
757
758 void
759 free_temp_slots ()
760 {
761 struct temp_slot *p;
762
763 for (p = temp_slots; p; p = p->next)
764 if (p->in_use && p->level == temp_slot_level && ! p->keep)
765 p->in_use = 0;
766 }
767
768 /* Push deeper into the nesting level for stack temporaries. */
769
770 void
771 push_temp_slots ()
772 {
773 /* For GNU C++, we must allow a sequence to be emitted anywhere in
774 the level where the sequence was started. By not changing levels
775 when the compiler is inside a sequence, the temporaries for the
776 sequence and the temporaries will not unwittingly conflict with
777 the temporaries for other sequences and/or code at that level. */
778 if (in_sequence_p ())
779 return;
780
781 temp_slot_level++;
782 }
783
784 /* Pop a temporary nesting level. All slots in use in the current level
785 are freed. */
786
787 void
788 pop_temp_slots ()
789 {
790 struct temp_slot *p;
791
792 /* See comment in push_temp_slots about why we don't change levels
793 in sequences. */
794 if (in_sequence_p ())
795 return;
796
797 for (p = temp_slots; p; p = p->next)
798 if (p->in_use && p->level == temp_slot_level)
799 p->in_use = 0;
800
801 temp_slot_level--;
802 }
803 \f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.
   DECL may belong to the current function or to a containing one;
   in the latter case the slot is carved from the outer frame and the
   rtl fixup is queued until compilation returns to that function.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  /* The stack slot (a MEM) that will replace the register, or 0.  */
  register rtx new = 0;
  /* Saved context of the containing function DECL belongs to, or 0 if
     DECL is local to the current function.  */
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (TREE_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    reg = XEXP (reg, 0);
  /* Nothing to move if the variable is not living in a register.  */
  if (GET_CODE (reg) != REG)
    return;

  /* Choose a slot: reuse the one reserved for this pseudo if it was a
     parameter, otherwise allocate fresh (in the containing function's
     frame when the variable is inherited).  */
  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
	new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_outer_stack_local (GET_MODE (reg),
					GET_MODE_SIZE (GET_MODE (reg)),
					0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
	new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_stack_local (GET_MODE (reg),
				  GET_MODE_SIZE (GET_MODE (reg)),
				  0);
    }

  /* Rewrite the REG rtx in place into a MEM, so every existing rtl
     reference to the pseudo now refers to the stack slot instead.  */
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.
	 Queue the work on the owning function's obstack so it survives
	 until pop_function_context drains the queue.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);
      temp
	= (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg);
}
893 \f
/* Fix up all rtl references to VAR, which has just been turned from a
   REG into a MEM (see put_var_into_stack): scan the main insn chain,
   every pending sequence, and every saved RTL_EXPR sequence.  */

static void
fixup_var_refs (var)
     rtx var;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.
     The chain is "toplevel" only when no sequences are pending.  */
  fixup_var_refs_insns (var, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      /* NOTE(review): 0 and const0_rtx are both skipped here —
	 presumably they mark an absent or already-consumed sequence;
	 confirm against expand_expr's RTL_EXPR handling.  */
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, seq, 0);
	  end_sequence ();
	}
    }
}
929 \f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;			/* The rtx being replaced: VAR itself, a
				   SUBREG of VAR, or a MEM containing VAR.  */
  rtx new;			/* Chosen replacement, or 0 if none yet.  */
  struct fixup_replacement *next;	/* Next entry in the list.  */
};
942
943 /* REPLACEMENTS is a pointer to a list of the above structures and X is
944 some part of an insn. Return a struct fixup_replacement whose OLD
945 value is equal to X. Allocate a new structure if no such entry exists. */
946
947 static struct fixup_replacement *
948 find_replacement (replacements, x)
949 struct fixup_replacement **replacements;
950 rtx x;
951 {
952 struct fixup_replacement *p;
953
954 /* See if we have already replaced this. */
955 for (p = *replacements; p && p->old != x; p = p->next)
956 ;
957
958 if (p == 0)
959 {
960 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
961 p->old = x;
962 p->new = 0;
963 p->next = *replacements;
964 *replacements = p;
965 }
966
967 return p;
968 }
969
/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, insn, toplevel)
     rtx var;
     rtx insn;
     int toplevel;
{
  while (insn)
    {
      /* Remember the following insn now; fixing up INSN may delete it
	 or emit new insns around it.  */
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	  || GET_CODE (insn) == JUMP_INSN)
	{
	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  if (toplevel
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == var
	      && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      next = delete_insn (insn);
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      struct fixup_replacement *replacements = 0;

	      fixup_var_refs_1 (var, &PATTERN (insn), insn, &replacements);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      /* We can not separate USE insns from the CALL_INSN
			 that they belong to.  If this is a CALL_INSN, insert
			 the move insn before the USE insns preceding it
			 instead of immediately before the insn.  */
		      if (GET_CODE (insn) == CALL_INSN)
			{
			  insert_before = insn;
			  while (GET_CODE (PREV_INSN (insert_before)) == INSN
				 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
			    insert_before = PREV_INSN (insert_before);
			}
		      else
			insert_before = insn;

		      /* Load the pseudo from VAR's (fixed-up) home just
			 before the insn that needs it.  */
		      emit_insn_before (gen_move_insn (replacements->new,
						       replacements->old),
					insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn);
	}
      insn = next;
    }
}
1062 \f
/* VAR is a MEM that used to be a pseudo register.  See if the rtx expression
   at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, loc, insn, replacements)
     register rtx var;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register, we can leave things
	     the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  /* The insn won't accept the MEM directly; use a pseudo instead.
	     fixup_var_refs_insns will emit the load from VAR's home.  */
	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      /* These cannot contain VAR; nothing to do.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    tem = fixup_memory_subreg (tem, insn, 1);
	  tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      int width = INTVAL (XEXP (x, 1));
	      int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  int offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		  offset = (GET_MODE_SIZE (is_mode)
			    - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx (MEM, wanted_mode,
				    plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = gen_rtx (CONST_INT, VOIDmode, pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR. */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	/* Strip wrappers to find the register (now MEM) underneath.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

        if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, &XEXP (outerdest, 1), insn, replacements);
	    fixup_var_refs_1 (var, &XEXP (outerdest, 2), insn, replacements);
	    fixup_var_refs_1 (var, &SET_SRC (x), insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 1);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode
		  = insn_operand_mode[(int) CODE_FOR_insv][0];
		enum machine_mode is_mode = GET_MODE (tem);
		int width = INTVAL (XEXP (outerdest, 1));
		int pos = INTVAL (XEXP (outerdest, 2));

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    int offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    /* If the bytes and bits are counted differently, we
		       must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx (MEM, wanted_mode,
				      plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = gen_rtx (CONST_INT, VOIDmode, pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && recog_memoized (insn) >= 0)
	  {
	    replacement = find_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      {
		SET_SRC (x) = replacement->new;
		return;
	      }
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);
	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && recog_memoized (insn) >= 0)
	  {
	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest;
	    tem = SET_DEST (x);
	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (tem) == STRICT_LOW_PART)
	      tem = XEXP (tem, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (tem) == SUBREG)
	      fixeddest = fixup_memory_subreg (tem, insn, 0);
	    else
	      fixeddest = fixup_stack_1 (tem, insn);

	    temp = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_after (gen_move_insn (fixeddest, temp), insn);
	    SET_DEST (x) = temp;
	  }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, &XVECEXP (x, i, j), insn, replacements);
	}
    }
}
1478 \f
1479 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1480 return an rtx (MEM:m1 newaddr) which is equivalent.
1481 If any insns must be emitted to compute NEWADDR, put them before INSN.
1482
1483 UNCRITICAL nonzero means accept paradoxical subregs.
1484 This is used for subregs found inside of ZERO_EXTRACTs. */
1485
1486 static rtx
1487 fixup_memory_subreg (x, insn, uncritical)
1488 rtx x;
1489 rtx insn;
1490 int uncritical;
1491 {
1492 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1493 rtx addr = XEXP (SUBREG_REG (x), 0);
1494 enum machine_mode mode = GET_MODE (x);
1495 rtx saved, result;
1496
1497 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1498 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1499 && ! uncritical)
1500 abort ();
1501
1502 #if BYTES_BIG_ENDIAN
1503 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1504 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1505 #endif
1506 addr = plus_constant (addr, offset);
1507 if (!flag_force_addr && memory_address_p (mode, addr))
1508 /* Shortcut if no insns need be emitted. */
1509 return change_address (SUBREG_REG (x), mode, addr);
1510 start_sequence ();
1511 result = change_address (SUBREG_REG (x), mode, addr);
1512 emit_insn_before (gen_sequence (), insn);
1513 end_sequence ();
1514 return result;
1515 }
1516
1517 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1518 Replace subexpressions of X in place.
1519 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1520 Otherwise return X, with its contents possibly altered.
1521
1522 If any insns must be emitted to compute NEWADDR, put them before INSN. */
1523
1524 static rtx
1525 walk_fixup_memory_subreg (x, insn)
1526 register rtx x;
1527 rtx insn;
1528 {
1529 register enum rtx_code code;
1530 register char *fmt;
1531 register int i;
1532
1533 if (x == 0)
1534 return 0;
1535
1536 code = GET_CODE (x);
1537
1538 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1539 return fixup_memory_subreg (x, insn, 0);
1540
1541 /* Nothing special about this RTX; fix its operands. */
1542
1543 fmt = GET_RTX_FORMAT (code);
1544 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1545 {
1546 if (fmt[i] == 'e')
1547 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn);
1548 if (fmt[i] == 'E')
1549 {
1550 register int j;
1551 for (j = 0; j < XVECLEN (x, i); j++)
1552 XVECEXP (x, i, j)
1553 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn);
1554 }
1555 }
1556 return x;
1557 }
1558 \f
#if 0
/* Fix up any references to stack slots that are invalid memory addresses
   because they exceed the maximum range of a displacement.  */

/* NOTE(review): this pass is compiled out; fixup_stack_1 is instead
   invoked on demand from the fixup routines above -- confirm before
   re-enabling.  */

void
fixup_stack_slots ()
{
  register rtx insn;

  /* Did we generate a stack slot that is out of range
     or otherwise has an invalid address?  */
  if (invalid_stack_slot)
    {
      /* Yes.  Must scan all insns for stack-refs that exceed the limit.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	    || GET_CODE (insn) == JUMP_INSN)
	  fixup_stack_1 (PATTERN (insn), insn);
    }
}
#endif
1580
1581 /* For each memory ref within X, if it refers to a stack slot
1582 with an out of range displacement, put the address in a temp register
1583 (emitting new insns before INSN to load these registers)
1584 and alter the memory ref to use that register.
1585 Replace each such MEM rtx with a copy, to avoid clobberage. */
1586
1587 static rtx
1588 fixup_stack_1 (x, insn)
1589 rtx x;
1590 rtx insn;
1591 {
1592 register int i;
1593 register RTX_CODE code = GET_CODE (x);
1594 register char *fmt;
1595
1596 if (code == MEM)
1597 {
1598 register rtx ad = XEXP (x, 0);
1599 /* If we have address of a stack slot but it's not valid
1600 (displacement is too large), compute the sum in a register. */
1601 if (GET_CODE (ad) == PLUS
1602 && GET_CODE (XEXP (ad, 0)) == REG
1603 && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1604 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER
1605 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1606 {
1607 rtx temp, seq;
1608 if (memory_address_p (GET_MODE (x), ad))
1609 return x;
1610
1611 start_sequence ();
1612 temp = copy_to_reg (ad);
1613 seq = gen_sequence ();
1614 end_sequence ();
1615 emit_insn_before (seq, insn);
1616 return change_address (x, VOIDmode, temp);
1617 }
1618 return x;
1619 }
1620
1621 fmt = GET_RTX_FORMAT (code);
1622 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1623 {
1624 if (fmt[i] == 'e')
1625 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1626 if (fmt[i] == 'E')
1627 {
1628 register int j;
1629 for (j = 0; j < XVECLEN (x, i); j++)
1630 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1631 }
1632 }
1633 return x;
1634 }
1635 \f
/* Optimization: a bit-field instruction whose field
   happens to be a byte or halfword in memory
   can be changed to a move instruction.

   We call here when INSN is an insn to examine or store into a bit-field.
   BODY is the SET-rtx to be altered.

   EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
   (Currently this is called only from function.c, and EQUIV_MEM
   is always 0.)  */

static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  register rtx bitfield;
  int destflag;			/* Nonzero if the extract is the SET_DEST.  */
  rtx seq = 0;			/* Conversion insns to emit, if any.  */
  enum machine_mode mode;

  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      register rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  register int offset = INTVAL (XEXP (bitfield, 2));
	  /* Adjust OFFSET to count bits from low-address byte.  */
#if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
	  offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		    - offset - INTVAL (XEXP (bitfield, 1)));
#endif
	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
#if BYTES_BIG_ENDIAN
	      offset -= (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			 - MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (memref))));
#endif
	    }

	  memref = change_address (memref, mode,
				   plus_constant (XEXP (memref, 0), offset));

	  /* Store this memory reference where
	     we found the bit field reference.

	     All changes below are queued with validate_change's in_group
	     flag set; apply_change_group at the end commits or rejects
	     them as a unit.  */

	  if (destflag)
	    {
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_WORD (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_WORD (dest) == 0)
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insns_before (seq, insn);
	}
    }
}
1787 \f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;	/* Set from FIRST_PARM_OFFSET (fndecl).  */
static int var_offset;		/* Set from STARTING_FRAME_OFFSET.  */
static int dynamic_offset;	/* Set from STACK_DYNAMIC_OFFSET (fndecl).  */
static int out_arg_offset;	/* Set from STACK_POINTER_OFFSET.  */

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
1836
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

void
instantiate_virtual_regs (fndecl, insns)
     tree fndecl;
     rtx insns;
{
  rtx insn;

  /* Compute the offsets to use for this function.  These are consumed
     by instantiate_virtual_regs_1 via the four file-scope variables.  */
  in_arg_offset = FIRST_PARM_OFFSET (fndecl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
  out_arg_offset = STACK_POINTER_OFFSET;

  /* Scan all variables and parameters of this function.  For each that is
     in memory, instantiate all virtual registers if the result is a valid
     address.  If not, we do it later.  That will handle most uses of virtual
     regs on many machines.  */
  instantiate_decls (fndecl, 1);

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  Patterns may have extra insns emitted for them (last
     argument 1); REG_NOTES may not (last argument 0).  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      {
	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
	instantiate_virtual_regs_1 (&REG_NOTES (insn), 0, 0);
      }

  /* Now instantiate the remaining register equivalences for debugging info.
     These will not be valid addresses.  */
  instantiate_decls (fndecl, 0);

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
}
1880
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.

   If VALID_ONLY, do this only if the resulting address is still valid.
   Otherwise, always do it.  */

static void
instantiate_decls (fndecl, valid_only)
     tree fndecl;
     int valid_only;
{
  tree decl;

  if (TREE_INLINE (fndecl))
    /* When compiling an inline function, the obstack used for
       rtl allocation is the maybepermanent_obstack.  Calling
       `resume_temporary_allocation' switches us back to that
       obstack while we process this function's parameters.  */
    resume_temporary_allocation ();

  /* Process all parameters of the function.  The containing object is
     passed only when VALID_ONLY, so the replacement is made only if the
     resulting address stays valid.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM)
	instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
				    valid_only ? DECL_RTL (decl) : 0, 0);
#if 1 /* This is probably correct, but it seems to require fixes
	 elsewhere in order to work.  Let's fix them in 2.1.  */
      if (DECL_INCOMING_RTL (decl)
	  && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM)
	instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
				    valid_only ? DECL_INCOMING_RTL (decl) : 0,
				    0);
#endif
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);

  if (TREE_INLINE (fndecl))
    {
      /* Save all rtl allocated for this function by raising the
	 high-water mark on the maybepermanent_obstack.  */
      preserve_data ();
      /* All further rtl allocation is now done in the current_obstack.  */
      rtl_in_current_obstack ();
    }
}
1929
1930 /* Subroutine of instantiate_decls: Process all decls in the given
1931 BLOCK node and all its subblocks. */
1932
1933 static void
1934 instantiate_decls_1 (let, valid_only)
1935 tree let;
1936 int valid_only;
1937 {
1938 tree t;
1939
1940 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1941 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
1942 instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
1943 valid_only ? DECL_RTL (t) : 0, 0);
1944
1945 /* Process all subblocks. */
1946 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1947 instantiate_decls_1 (t, valid_only);
1948 }
1949 \f
1950 /* Given a pointer to a piece of rtx and an optional pointer to the
1951 containing object, instantiate any virtual registers present in it.
1952
   If EXTRA_INSNS, we always do the replacement and generate
   any extra insns before OBJECT.  If it is zero, we do nothing
   if replacement is not valid.
1956
1957 Return 1 if we either had nothing to do or if we were able to do the
1958 needed replacement. Return 0 otherwise; we only return zero if
1959 EXTRA_INSNS is zero.
1960
1961 We first try some simple transformations to avoid the creation of extra
1962 pseudos. */
1963
static int
instantiate_virtual_regs_1 (loc, object, extra_insns)
     rtx *loc;
     rtx object;
     int extra_insns;
{
  rtx x;
  RTX_CODE code;
  rtx new = 0;		/* Hard register to substitute for a virtual reg.  */
  int offset;		/* Constant offset to fold in with NEW.  */
  rtx temp;
  rtx seq;
  int i, j;
  char *fmt;

  /* Re-start here to avoid recursion in common cases.  */
 restart:

  x = *loc;
  if (x == 0)
    return 1;

  code = GET_CODE (x);

  /* Check for some special cases.  */
  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* These codes cannot contain a virtual register; nothing to do.  */
      return 1;

    case SET:
      /* We are allowed to set the virtual registers.  This means that
	 the actual register should receive the source minus the
	 appropriate offset.  This is used, for example, in the handling
	 of non-local gotos.  */
      if (SET_DEST (x) == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = - in_arg_offset;
      else if (SET_DEST (x) == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = - var_offset;
      else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = - dynamic_offset;
      else if (SET_DEST (x) == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = - out_arg_offset;

      if (new)
	{
	  /* The only valid sources here are PLUS or REG.  Just do
	     the simplest possible thing to handle them.  */
	  if (GET_CODE (SET_SRC (x)) != REG
	      && GET_CODE (SET_SRC (x)) != PLUS)
	    abort ();

	  /* Compute SET_SRC plus OFFSET in a fresh insn sequence,
	     forcing the result into an operand-legal form.  */
	  start_sequence ();
	  if (GET_CODE (SET_SRC (x)) != REG)
	    temp = force_operand (SET_SRC (x), 0);
	  else
	    temp = SET_SRC (x);
	  temp = force_operand (plus_constant (temp, offset), 0);
	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, object);
	  SET_DEST (x) = new;

	  /* We already emitted the setup insns, so the replacement must
	     succeed and we must have been allowed to emit insns.  */
	  if (!validate_change (object, &SET_SRC (x), temp, 0)
	      || ! extra_insns)
	    abort ();

	  return 1;
	}

      instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
      /* Handle the source via the restart loop instead of recursing.  */
      loc = &SET_SRC (x);
      goto restart;

    case PLUS:
      /* Handle special case of virtual register plus constant.  */
      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx old;

	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
	  if (GET_CODE (XEXP (x, 0)) == PLUS)
	    {
	      rtx inner = XEXP (XEXP (x, 0), 0);

	      if (inner == virtual_incoming_args_rtx)
		new = arg_pointer_rtx, offset = in_arg_offset;
	      else if (inner == virtual_stack_vars_rtx)
		new = frame_pointer_rtx, offset = var_offset;
	      else if (inner == virtual_stack_dynamic_rtx)
		new = stack_pointer_rtx, offset = dynamic_offset;
	      else if (inner == virtual_outgoing_args_rtx)
		new = stack_pointer_rtx, offset = out_arg_offset;
	      else
		{
		  /* The inner PLUS holds no virtual register; process
		     the whole first operand normally.  */
		  loc = &XEXP (x, 0);
		  goto restart;
		}

	      instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
					  extra_insns);
	      new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
	    }

	  else if (XEXP (x, 0) == virtual_incoming_args_rtx)
	    new = arg_pointer_rtx, offset = in_arg_offset;
	  else if (XEXP (x, 0) == virtual_stack_vars_rtx)
	    new = frame_pointer_rtx, offset = var_offset;
	  else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
	    new = stack_pointer_rtx, offset = dynamic_offset;
	  else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
	    new = stack_pointer_rtx, offset = out_arg_offset;
	  else
	    {
	      /* We know the second operand is a constant.  Unless the
		 first operand is a REG (which has been already checked),
		 it needs to be checked.  */
	      if (GET_CODE (XEXP (x, 0)) != REG)
		{
		  loc = &XEXP (x, 0);
		  goto restart;
		}
	      return 1;
	    }

	  /* Substitute the hard register for the virtual one and fold
	     the frame offset into the constant operand.  OLD lets us
	     undo the substitution on failure.  */
	  old = XEXP (x, 0);
	  XEXP (x, 0) = new;
	  new = plus_constant (XEXP (x, 1), offset);

	  /* If the new constant is zero, try to replace the sum with its
	     first operand.  */
	  if (new == const0_rtx
	      && validate_change (object, loc, XEXP (x, 0), 0))
	    return 1;

	  /* Next try to replace constant with new one.  */
	  if (!validate_change (object, &XEXP (x, 1), new, 0))
	    {
	      if (! extra_insns)
		{
		  /* Not allowed to emit fixup insns; undo and fail.  */
		  XEXP (x, 0) = old;
		  return 0;
		}

	      /* Otherwise copy the new constant into a register and replace
		 constant with that register.  */
	      temp = gen_reg_rtx (Pmode);
	      if (validate_change (object, &XEXP (x, 1), temp, 0))
		emit_insn_before (gen_move_insn (temp, new), object);
	      else
		{
		  /* If that didn't work, replace this expression with a
		     register containing the sum.  */

		  new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
		  XEXP (x, 0) = old;

		  start_sequence ();
		  temp = force_operand (new, 0);
		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, object);
		  if (! validate_change (object, loc, temp, 0)
		      && ! validate_replace_rtx (x, temp, object))
		    abort ();
		}
	    }

	  return 1;
	}

      /* Fall through to generic two-operand expression case.  */
    case EXPR_LIST:
    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV:	case UDIV:
    case MOD:	case UMOD:
    case AND:	case IOR:	case XOR:
    case LSHIFT:  case ASHIFT:	case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ROTATERT:
    case NE:	case EQ:
    case GE:	case GT:	case GEU:    case GTU:
    case LE:	case LT:	case LEU:    case LTU:
      /* Recurse on operand 1 if it might hold a virtual reg; operand 0
	 is handled by the restart loop.  */
      if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
      loc = &XEXP (x, 0);
      goto restart;

    case MEM:
      /* Most cases of MEM that convert to valid addresses have already been
	 handled by our scan of regno_reg_rtx.  The only special handling we
	 need here is to make a copy of the rtx to ensure it isn't being
	 shared if we have to change it to a pseudo.

	 If the rtx is a simple reference to an address via a virtual register,
	 it can potentially be shared.  In such cases, first try to make it
	 a valid address, which can also be shared.  Otherwise, copy it and
	 proceed normally.

	 First check for common cases that need no processing.  These are
	 usually due to instantiation already being done on a previous instance
	 of a shared rtx.  */

      temp = XEXP (x, 0);
      if (CONSTANT_ADDRESS_P (temp)
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  || temp == arg_pointer_rtx
#endif
	  || temp == frame_pointer_rtx)
	return 1;

      if (GET_CODE (temp) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	  && (XEXP (temp, 0) == frame_pointer_rtx
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || XEXP (temp, 0) == arg_pointer_rtx
#endif
	      ))
	return 1;

      if (temp == virtual_stack_vars_rtx
	  || temp == virtual_incoming_args_rtx
	  || (GET_CODE (temp) == PLUS
	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
	{
	  /* This MEM may be shared.  If the substitution can be done without
	     the need to generate new pseudos, we want to do it in place
	     so all copies of the shared rtx benefit.  The call below will
	     only make substitutions if the resulting address is still
	     valid.

	     Note that we cannot pass X as the object in the recursive call
	     since the insn being processed may not allow all valid
	     addresses.  However, if we were not passed an object, we can
	     only modify X without copying it if X will have a valid
	     address.

	     ??? Also note that this can still lose if OBJECT is an insn that
	     has less restrictions on an address than some other insn.
	     In that case, we will modify the shared address.  This case
	     doesn't seem very likely, though.  */

	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
					  object ? object : x, 0))
	    return 1;

	  /* Otherwise make a copy and process that copy.  We copy the entire
	     RTL expression since it might be a PLUS which could also be
	     shared.  */
	  *loc = x = copy_rtx (x);
	}

      /* Fall through to generic unary operation case.  */
    case USE:
    case CLOBBER:
    case SUBREG:
    case STRICT_LOW_PART:
    case NEG:	       case NOT:
    case PRE_DEC:      case PRE_INC:	case POST_DEC:	case POST_INC:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      /* These cases either have just one operand or we know that we need not
	 check the rest of the operands.  */
      loc = &XEXP (x, 0);
      goto restart;

    case REG:
      /* Try to replace with a PLUS.  If that doesn't work, compute the sum
	 in front of this insn and substitute the temporary.  */
      if (x == virtual_incoming_args_rtx)
	new = arg_pointer_rtx, offset = in_arg_offset;
      else if (x == virtual_stack_vars_rtx)
	new = frame_pointer_rtx, offset = var_offset;
      else if (x == virtual_stack_dynamic_rtx)
	new = stack_pointer_rtx, offset = dynamic_offset;
      else if (x == virtual_outgoing_args_rtx)
	new = stack_pointer_rtx, offset = out_arg_offset;

      if (new)
	{
	  temp = plus_constant (new, offset);
	  if (!validate_change (object, loc, temp, 0))
	    {
	      if (! extra_insns)
		return 0;

	      /* Compute the sum in front of OBJECT and substitute
		 the resulting temporary instead.  */
	      start_sequence ();
	      temp = force_operand (temp, 0);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insns_before (seq, object);
	      if (! validate_change (object, loc, temp, 0)
		  && ! validate_replace_rtx (x, temp, object))
		abort ();
	    }
	}

      return 1;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      {
	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
	  return 0;
      }
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
					  extra_insns))
	  return 0;

  return 1;
}
2302 \f
2303 /* Optimization: assuming this function does not receive nonlocal gotos,
2304 delete the handlers for such, as well as the insns to establish
2305 and disestablish them. */
2306
2307 static void
2308 delete_handlers ()
2309 {
2310 rtx insn;
2311 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2312 {
2313 /* Delete the handler by turning off the flag that would
2314 prevent jump_optimize from deleting it.
2315 Also permit deletion of the nonlocal labels themselves
2316 if nothing local refers to them. */
2317 if (GET_CODE (insn) == CODE_LABEL)
2318 LABEL_PRESERVE_P (insn) = 0;
2319 if (GET_CODE (insn) == INSN
2320 && GET_CODE (PATTERN (insn)) == SET
2321 && (SET_DEST (PATTERN (insn)) == nonlocal_goto_handler_slot
2322 || SET_SRC (PATTERN (insn)) == nonlocal_goto_handler_slot
2323 || SET_DEST (PATTERN (insn)) == nonlocal_goto_stack_level
2324 || SET_SRC (PATTERN (insn)) == nonlocal_goto_stack_level))
2325 delete_insn (insn);
2326 }
2327 }
2328
2329 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2330 of the current function. */
2331
2332 rtx
2333 nonlocal_label_rtx_list ()
2334 {
2335 tree t;
2336 rtx x = 0;
2337
2338 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2339 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2340
2341 return x;
2342 }
2343 \f
2344 /* Output a USE for any register use in RTL.
2345 This is used with -noreg to mark the extent of lifespan
2346 of any registers used in a user-visible variable's DECL_RTL. */
2347
2348 void
2349 use_variable (rtl)
2350 rtx rtl;
2351 {
2352 if (GET_CODE (rtl) == REG)
2353 /* This is a register variable. */
2354 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2355 else if (GET_CODE (rtl) == MEM
2356 && GET_CODE (XEXP (rtl, 0)) == REG
2357 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2358 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2359 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2360 /* This is a variable-sized structure. */
2361 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2362 }
2363
2364 /* Like use_variable except that it outputs the USEs after INSN
2365 instead of at the end of the insn-chain. */
2366
2367 void
2368 use_variable_after (rtl, insn)
2369 rtx rtl, insn;
2370 {
2371 if (GET_CODE (rtl) == REG)
2372 /* This is a register variable. */
2373 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2374 else if (GET_CODE (rtl) == MEM
2375 && GET_CODE (XEXP (rtl, 0)) == REG
2376 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2377 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2378 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2379 /* This is a variable-sized structure. */
2380 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2381 }
2382 \f
/* Return the current value of max_parm_reg — presumably the high-water
   mark of register numbers used for parameters; maintained elsewhere
   in this file (TODO confirm against its definition).  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
2388
2389 /* Return the first insn following those generated by `assign_parms'. */
2390
2391 rtx
2392 get_first_nonparm_insn ()
2393 {
2394 if (last_parm_insn)
2395 return NEXT_INSN (last_parm_insn);
2396 return get_insns ();
2397 }
2398
2399 /* Return 1 if EXP returns an aggregate value, for which an address
2400 must be passed to the function or returned by the function. */
2401
2402 int
2403 aggregate_value_p (exp)
2404 tree exp;
2405 {
2406 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2407 return 1;
2408 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2409 return 1;
2410 if (flag_pcc_struct_return
2411 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2412 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE))
2413 return 1;
2414 return 0;
2415 }
2416 \f
2417 /* Assign RTL expressions to the function's parameters.
2418 This may involve copying them into registers and using
2419 those registers as the RTL for them.
2420
2421 If SECOND_TIME is non-zero it means that this function is being
2422 called a second time. This is done by integrate.c when a function's
2423 compilation is deferred. We need to come back here in case the
2424 FUNCTION_ARG macro computes items needed for the rest of the compilation
2425 (such as changing which registers are fixed or caller-saved). But suppress
2426 writing any insns or setting DECL_RTL of anything in this case. */
2427
2428 void
2429 assign_parms (fndecl, second_time)
2430 tree fndecl;
2431 int second_time;
2432 {
2433 register tree parm;
2434 register rtx entry_parm = 0;
2435 register rtx stack_parm = 0;
2436 CUMULATIVE_ARGS args_so_far;
2437 enum machine_mode passed_mode, nominal_mode;
2438 /* Total space needed so far for args on the stack,
2439 given as a constant and a tree-expression. */
2440 struct args_size stack_args_size;
2441 tree fntype = TREE_TYPE (fndecl);
2442 tree fnargs = DECL_ARGUMENTS (fndecl);
2443 /* This is used for the arg pointer when referring to stack args. */
2444 rtx internal_arg_pointer;
2445 /* This is a dummy PARM_DECL that we used for the function result if
2446 the function returns a structure. */
2447 tree function_result_decl = 0;
2448 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2449 int varargs_setup = 0;
2450
2451 /* Nonzero if the last arg is named `__builtin_va_alist',
2452 which is used on some machines for old-fashioned non-ANSI varargs.h;
2453 this should be stuck onto the stack as if it had arrived there. */
2454 int vararg
2455 = (fnargs
2456 && (parm = tree_last (fnargs)) != 0
2457 && DECL_NAME (parm)
2458 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2459 "__builtin_va_alist")));
2460
2461 /* Nonzero if function takes extra anonymous args.
2462 This means the last named arg must be on the stack
2463 right before the anonymous ones. */
2464 int stdarg
2465 = (TYPE_ARG_TYPES (fntype) != 0
2466 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2467 != void_type_node));
2468
2469 /* If the reg that the virtual arg pointer will be translated into is
2470 not a fixed reg or is the stack pointer, make a copy of the virtual
2471 arg pointer, and address parms via the copy. The frame pointer is
2472 considered fixed even though it is not marked as such.
2473
2474 The second time through, simply use ap to avoid generating rtx. */
2475
2476 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2477 || ! (fixed_regs[ARG_POINTER_REGNUM]
2478 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2479 && ! second_time)
2480 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2481 else
2482 internal_arg_pointer = virtual_incoming_args_rtx;
2483 current_function_internal_arg_pointer = internal_arg_pointer;
2484
2485 stack_args_size.constant = 0;
2486 stack_args_size.var = 0;
2487
2488 /* If struct value address is treated as the first argument, make it so. */
2489 if (aggregate_value_p (DECL_RESULT (fndecl))
2490 && ! current_function_returns_pcc_struct
2491 && struct_value_incoming_rtx == 0)
2492 {
2493 tree type = build_pointer_type (fntype);
2494
2495 function_result_decl = build_decl (PARM_DECL, 0, type);
2496
2497 DECL_ARG_TYPE (function_result_decl) = type;
2498 TREE_CHAIN (function_result_decl) = fnargs;
2499 fnargs = function_result_decl;
2500 }
2501
2502 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2503 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2504
2505 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2506 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, 0);
2507 #else
2508 INIT_CUMULATIVE_ARGS (args_so_far, fntype, 0);
2509 #endif
2510
2511 /* We haven't yet found an argument that we must push and pretend the
2512 caller did. */
2513 current_function_pretend_args_size = 0;
2514
2515 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2516 {
2517 int aggregate
2518 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2519 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2520 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE);
2521 struct args_size stack_offset;
2522 struct args_size arg_size;
2523 int passed_pointer = 0;
2524 tree passed_type = DECL_ARG_TYPE (parm);
2525
2526 /* Set LAST_NAMED if this is last named arg before some
2527 anonymous args. We treat it as if it were anonymous too. */
2528 int last_named = ((TREE_CHAIN (parm) == 0
2529 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2530 && (vararg || stdarg));
2531
2532 if (TREE_TYPE (parm) == error_mark_node
2533 /* This can happen after weird syntax errors
2534 or if an enum type is defined among the parms. */
2535 || TREE_CODE (parm) != PARM_DECL
2536 || passed_type == NULL)
2537 {
2538 DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx);
2539 TREE_USED (parm) = 1;
2540 continue;
2541 }
2542
2543 /* For varargs.h function, save info about regs and stack space
2544 used by the individual args, not including the va_alist arg. */
2545 if (vararg && last_named)
2546 current_function_args_info = args_so_far;
2547
2548 /* Find mode of arg as it is passed, and mode of arg
2549 as it should be during execution of this function. */
2550 passed_mode = TYPE_MODE (passed_type);
2551 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2552
2553 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2554 /* See if this arg was passed by invisible reference. */
2555 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2556 passed_type, ! last_named))
2557 {
2558 passed_type = build_pointer_type (passed_type);
2559 passed_pointer = 1;
2560 passed_mode = nominal_mode = Pmode;
2561 }
2562 #endif
2563
2564 /* Let machine desc say which reg (if any) the parm arrives in.
2565 0 means it arrives on the stack. */
2566 #ifdef FUNCTION_INCOMING_ARG
2567 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2568 passed_type, ! last_named);
2569 #else
2570 entry_parm = FUNCTION_ARG (args_so_far, passed_mode,
2571 passed_type, ! last_named);
2572 #endif
2573
2574 #ifdef SETUP_INCOMING_VARARGS
2575 /* If this is the last named parameter, do any required setup for
2576 varargs or stdargs. We need to know about the case of this being an
2577 addressable type, in which case we skip the registers it
2578 would have arrived in.
2579
2580 For stdargs, LAST_NAMED will be set for two parameters, the one that
2581 is actually the last named, and the dummy parameter. We only
2582 want to do this action once.
2583
2584 Also, indicate when RTL generation is to be suppressed. */
2585 if (last_named && !varargs_setup)
2586 {
2587 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2588 current_function_pretend_args_size,
2589 second_time);
2590 varargs_setup = 1;
2591 }
2592 #endif
2593
2594 /* Determine parm's home in the stack,
2595 in case it arrives in the stack or we should pretend it did.
2596
2597 Compute the stack position and rtx where the argument arrives
2598 and its size.
2599
2600 There is one complexity here: If this was a parameter that would
2601 have been passed in registers, but wasn't only because it is
2602 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2603 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2604 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2605 0 as it was the previous time. */
2606
2607 locate_and_pad_parm (passed_mode, passed_type,
2608 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2609 1,
2610 #else
2611 #ifdef FUNCTION_INCOMING_ARG
2612 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2613 passed_type,
2614 (! last_named
2615 || varargs_setup)) != 0,
2616 #else
2617 FUNCTION_ARG (args_so_far, passed_mode,
2618 passed_type,
2619 ! last_named || varargs_setup) != 0,
2620 #endif
2621 #endif
2622 fndecl, &stack_args_size, &stack_offset, &arg_size);
2623
2624 if (! second_time)
2625 {
2626 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2627
2628 if (offset_rtx == const0_rtx)
2629 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2630 else
2631 stack_parm = gen_rtx (MEM, passed_mode,
2632 gen_rtx (PLUS, Pmode,
2633 internal_arg_pointer, offset_rtx));
2634
2635 /* If this is a memory ref that contains aggregate components,
2636 mark it as such for cse and loop optimize. */
2637 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2638 }
2639
2640 /* If this parameter was passed both in registers and in the stack,
2641 use the copy on the stack. */
2642 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2643 entry_parm = 0;
2644
2645 /* If this parm was passed part in regs and part in memory,
2646 pretend it arrived entirely in memory
2647 by pushing the register-part onto the stack.
2648
2649 In the special case of a DImode or DFmode that is split,
2650 we could put it together in a pseudoreg directly,
2651 but for now that's not worth bothering with. */
2652
2653 if (entry_parm)
2654 {
2655 int nregs = 0;
2656 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2657 nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2658 passed_type, ! last_named);
2659 #endif
2660
2661 if (nregs > 0)
2662 {
2663 current_function_pretend_args_size
2664 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2665 / (PARM_BOUNDARY / BITS_PER_UNIT)
2666 * (PARM_BOUNDARY / BITS_PER_UNIT));
2667
2668 if (! second_time)
2669 move_block_from_reg (REGNO (entry_parm),
2670 validize_mem (stack_parm), nregs);
2671 entry_parm = stack_parm;
2672 }
2673 }
2674
2675 /* If we didn't decide this parm came in a register,
2676 by default it came on the stack. */
2677 if (entry_parm == 0)
2678 entry_parm = stack_parm;
2679
2680 /* Record permanently how this parm was passed. */
2681 if (! second_time)
2682 DECL_INCOMING_RTL (parm) = entry_parm;
2683
2684 /* If there is actually space on the stack for this parm,
2685 count it in stack_args_size; otherwise set stack_parm to 0
2686 to indicate there is no preallocated stack slot for the parm. */
2687
2688 if (entry_parm == stack_parm
2689 #ifdef REG_PARM_STACK_SPACE
2690 /* On some machines, even if a parm value arrives in a register
2691 there is still an (uninitialized) stack slot allocated for it. */
2692 || REG_PARM_STACK_SPACE (fndecl) > 0
2693 #endif
2694 )
2695 {
2696 stack_args_size.constant += arg_size.constant;
2697 if (arg_size.var)
2698 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2699 }
2700 else
2701 /* No stack slot was pushed for this parm. */
2702 stack_parm = 0;
2703
2704 /* Update info on where next arg arrives in registers. */
2705
2706 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
2707 passed_type, ! last_named);
2708
2709 /* If this is our second time through, we are done with this parm. */
2710 if (second_time)
2711 continue;
2712
2713 /* Now adjust STACK_PARM to the mode and precise location
2714 where this parameter should live during execution,
2715 if we discover that it must live in the stack during execution.
2716 To make debuggers happier on big-endian machines, we store
2717 the value in the last bytes of the space available. */
2718
2719 if (nominal_mode != BLKmode && nominal_mode != passed_mode
2720 && stack_parm != 0)
2721 {
2722 rtx offset_rtx;
2723
2724 #if BYTES_BIG_ENDIAN
2725 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
2726 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
2727 - GET_MODE_SIZE (nominal_mode));
2728 #endif
2729
2730 offset_rtx = ARGS_SIZE_RTX (stack_offset);
2731 if (offset_rtx == const0_rtx)
2732 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
2733 else
2734 stack_parm = gen_rtx (MEM, nominal_mode,
2735 gen_rtx (PLUS, Pmode,
2736 internal_arg_pointer, offset_rtx));
2737
2738 /* If this is a memory ref that contains aggregate components,
2739 mark it as such for cse and loop optimize. */
2740 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2741 }
2742
2743 /* ENTRY_PARM is an RTX for the parameter as it arrives,
2744 in the mode in which it arrives.
2745 STACK_PARM is an RTX for a stack slot where the parameter can live
2746 during the function (in case we want to put it there).
2747 STACK_PARM is 0 if no stack slot was pushed for it.
2748
2749 Now output code if necessary to convert ENTRY_PARM to
2750 the type in which this function declares it,
2751 and store that result in an appropriate place,
2752 which may be a pseudo reg, may be STACK_PARM,
2753 or may be a local stack slot if STACK_PARM is 0.
2754
2755 Set DECL_RTL to that place. */
2756
2757 if (nominal_mode == BLKmode)
2758 {
2759 /* If a BLKmode arrives in registers, copy it to a stack slot. */
2760 if (GET_CODE (entry_parm) == REG)
2761 {
2762 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
2763 UNITS_PER_WORD);
2764
2765 /* Note that we will be storing an integral number of words.
2766 So we have to be careful to ensure that we allocate an
2767 integral number of words. We do this below in the
2768 assign_stack_local if space was not allocated in the argument
2769 list. If it was, this will not work if PARM_BOUNDARY is not
2770 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2771 if it becomes a problem. */
2772
2773 if (stack_parm == 0)
2774 stack_parm
2775 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
2776 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
2777 abort ();
2778
2779 move_block_from_reg (REGNO (entry_parm),
2780 validize_mem (stack_parm),
2781 size_stored / UNITS_PER_WORD);
2782 }
2783 DECL_RTL (parm) = stack_parm;
2784 }
2785 else if (! (
2786 #if 0 /* This change was turned off because it makes compilation bigger. */
2787 !optimize
2788 #else /* It's not clear why the following was replaced. */
2789 /* Obsoleted by preceding line. */
2790 (obey_regdecls && ! TREE_REGDECL (parm)
2791 && ! TREE_INLINE (fndecl))
2792 #endif
2793 /* layout_decl may set this. */
2794 || TREE_ADDRESSABLE (parm)
2795 || TREE_SIDE_EFFECTS (parm)
2796 /* If -ffloat-store specified, don't put explicit
2797 float variables into registers. */
2798 || (flag_float_store
2799 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
2800 /* Always assign pseudo to structure return or item passed
2801 by invisible reference. */
2802 || passed_pointer || parm == function_result_decl)
2803 {
2804 /* Store the parm in a pseudoregister during the function. */
2805 register rtx parmreg = gen_reg_rtx (nominal_mode);
2806
2807 REG_USERVAR_P (parmreg) = 1;
2808
2809 /* If this was an item that we received a pointer to, set DECL_RTL
2810 appropriately. */
2811 if (passed_pointer)
2812 {
2813 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
2814 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
2815 }
2816 else
2817 DECL_RTL (parm) = parmreg;
2818
2819 /* Copy the value into the register. */
2820 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
2821 {
2822 /* If ENTRY_PARM is a hard register, it might be in a register
2823 not valid for operating in its mode (e.g., an odd-numbered
2824 register for a DFmode). In that case, moves are the only
2825 thing valid, so we can't do a convert from there. This
2826 occurs when the calling sequence allow such misaligned
2827 usages. */
2828 if (GET_CODE (entry_parm) == REG
2829 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2830 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
2831 GET_MODE (entry_parm)))
2832 convert_move (parmreg, copy_to_reg (entry_parm));
2833 else
2834 convert_move (parmreg, validize_mem (entry_parm), 0);
2835 }
2836 else
2837 emit_move_insn (parmreg, validize_mem (entry_parm));
2838
2839 /* In any case, record the parm's desired stack location
2840 in case we later discover it must live in the stack. */
2841 if (REGNO (parmreg) >= nparmregs)
2842 {
2843 rtx *new;
2844 nparmregs = REGNO (parmreg) + 5;
2845 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
2846 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
2847 parm_reg_stack_loc = new;
2848 }
2849 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
2850
2851 /* Mark the register as eliminable if we did no conversion
2852 and it was copied from memory at a fixed offset,
2853 and the arg pointer was not copied to a pseudo-reg.
2854 If the arg pointer is a pseudo reg or the offset formed
2855 an invalid address, such memory-equivalences
2856 as we make here would screw up life analysis for it. */
2857 if (nominal_mode == passed_mode
2858 && GET_CODE (entry_parm) == MEM
2859 && stack_offset.var == 0
2860 && reg_mentioned_p (virtual_incoming_args_rtx,
2861 XEXP (entry_parm, 0)))
2862 REG_NOTES (get_last_insn ())
2863 = gen_rtx (EXPR_LIST, REG_EQUIV,
2864 entry_parm, REG_NOTES (get_last_insn ()));
2865
2866 /* For pointer data type, suggest pointer register. */
2867 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2868 mark_reg_pointer (parmreg);
2869 }
2870 else
2871 {
2872 /* Value must be stored in the stack slot STACK_PARM
2873 during function execution. */
2874
2875 if (passed_mode != nominal_mode)
2876 {
2877 /* Conversion is required. */
2878 if (GET_CODE (entry_parm) == REG
2879 && REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
2880 && ! HARD_REGNO_MODE_OK (REGNO (entry_parm), passed_mode))
2881 entry_parm = copy_to_reg (entry_parm);
2882
2883 entry_parm = convert_to_mode (nominal_mode, entry_parm, 0);
2884 }
2885
2886 if (entry_parm != stack_parm)
2887 {
2888 if (stack_parm == 0)
2889 stack_parm = assign_stack_local (GET_MODE (entry_parm),
2890 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
2891 emit_move_insn (validize_mem (stack_parm),
2892 validize_mem (entry_parm));
2893 }
2894
2895 DECL_RTL (parm) = stack_parm;
2896 }
2897
2898 /* If this "parameter" was the place where we are receiving the
2899 function's incoming structure pointer, set up the result. */
2900 if (parm == function_result_decl)
2901 DECL_RTL (DECL_RESULT (fndecl))
2902 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
2903
2904 if (TREE_THIS_VOLATILE (parm))
2905 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
2906 if (TREE_READONLY (parm))
2907 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
2908 }
2909
2910 max_parm_reg = max_reg_num ();
2911 last_parm_insn = get_last_insn ();
2912
2913 current_function_args_size = stack_args_size.constant;
2914
2915 /* Adjust function incoming argument size for alignment and
2916 minimum length. */
2917
2918 #ifdef REG_PARM_STACK_SPACE
2919 current_function_args_size = MAX (current_function_args_size,
2920 REG_PARM_STACK_SPACE (fndecl));
2921 #endif
2922
2923 #ifdef STACK_BOUNDARY
2924 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2925
2926 current_function_args_size
2927 = ((current_function_args_size + STACK_BYTES - 1)
2928 / STACK_BYTES) * STACK_BYTES;
2929 #endif
2930
2931 #ifdef ARGS_GROW_DOWNWARD
2932 current_function_arg_offset_rtx
2933 = (stack_args_size.var == 0 ? gen_rtx (CONST_INT, VOIDmode,
2934 -stack_args_size.constant)
2935 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
2936 size_int (-stack_args_size.constant)),
2937 0, VOIDmode, 0));
2938 #else
2939 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
2940 #endif
2941
2942 /* See how many bytes, if any, of its args a function should try to pop
2943 on return. */
2944
2945 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
2946 current_function_args_size);
2947
2948 /* For stdarg.h function, save info about regs and stack space
2949 used by the named args. */
2950
2951 if (stdarg)
2952 current_function_args_info = args_so_far;
2953
2954 /* Set the rtx used for the function return value. Put this in its
2955 own variable so any optimizers that need this information don't have
2956 to include tree.h. Do this here so it gets done when an inlined
2957 function gets output. */
2958
2959 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
2960 }
2961 \f
2962 /* Compute the size and offset from the start of the stacked arguments for a
2963 parm passed in mode PASSED_MODE and with type TYPE.
2964
2965 INITIAL_OFFSET_PTR points to the current offset into the stacked
2966 arguments.
2967
2968 The starting offset and size for this parm are returned in *OFFSET_PTR
2969 and *ARG_SIZE_PTR, respectively.
2970
2971 IN_REGS is non-zero if the argument will be passed in registers. It will
2972 never be set if REG_PARM_STACK_SPACE is not defined.
2973
2974 FNDECL is the function in which the argument was defined.
2975
2976 There are two types of rounding that are done. The first, controlled by
2977 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
2978 list to be aligned to the specific boundary (in bits). This rounding
2979 affects the initial and starting offsets, but not the argument size.
2980
2981 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
2982 optionally rounds the size of the parm to PARM_BOUNDARY. The
2983 initial offset is not affected by this rounding, while the size always
2984 is and the starting offset may be. */
2985
/* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
   initial_offset_ptr is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   initial_offset_ptr.  arg_size_ptr is always positive.  */
2990
2991 static void pad_to_arg_alignment (), pad_below ();
2992
2993 void
2994 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
2995 initial_offset_ptr, offset_ptr, arg_size_ptr)
2996 enum machine_mode passed_mode;
2997 tree type;
2998 int in_regs;
2999 tree fndecl;
3000 struct args_size *initial_offset_ptr;
3001 struct args_size *offset_ptr;
3002 struct args_size *arg_size_ptr;
3003 {
3004 tree sizetree
3005 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3006 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3007 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3008 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3009 int reg_parm_stack_space = 0;
3010
3011 #ifdef REG_PARM_STACK_SPACE
3012 /* If we have found a stack parm before we reach the end of the
3013 area reserved for registers, skip that area. */
3014 if (! in_regs)
3015 {
3016 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3017 if (reg_parm_stack_space > 0)
3018 {
3019 if (initial_offset_ptr->var)
3020 {
3021 initial_offset_ptr->var
3022 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3023 size_int (reg_parm_stack_space));
3024 initial_offset_ptr->constant = 0;
3025 }
3026 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3027 initial_offset_ptr->constant = reg_parm_stack_space;
3028 }
3029 }
3030 #endif /* REG_PARM_STACK_SPACE */
3031
3032 arg_size_ptr->var = 0;
3033 arg_size_ptr->constant = 0;
3034
3035 #ifdef ARGS_GROW_DOWNWARD
3036 if (initial_offset_ptr->var)
3037 {
3038 offset_ptr->constant = 0;
3039 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3040 initial_offset_ptr->var);
3041 }
3042 else
3043 {
3044 offset_ptr->constant = - initial_offset_ptr->constant;
3045 offset_ptr->var = 0;
3046 }
3047 if (where_pad == upward
3048 && (TREE_CODE (sizetree) != INTEGER_CST
3049 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3050 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3051 SUB_PARM_SIZE (*offset_ptr, sizetree);
3052 pad_to_arg_alignment (offset_ptr, boundary);
3053 if (initial_offset_ptr->var)
3054 {
3055 arg_size_ptr->var = size_binop (MINUS_EXPR,
3056 size_binop (MINUS_EXPR,
3057 integer_zero_node,
3058 initial_offset_ptr->var),
3059 offset_ptr->var);
3060 }
3061 else
3062 {
3063 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3064 offset_ptr->constant);
3065 }
3066 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3067 if (where_pad == downward)
3068 pad_below (arg_size_ptr, passed_mode, sizetree);
3069 #else /* !ARGS_GROW_DOWNWARD */
3070 pad_to_arg_alignment (initial_offset_ptr, boundary);
3071 *offset_ptr = *initial_offset_ptr;
3072 if (where_pad == downward)
3073 pad_below (offset_ptr, passed_mode, sizetree);
3074
3075 #ifdef PUSH_ROUNDING
3076 if (passed_mode != BLKmode)
3077 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3078 #endif
3079
3080 if (where_pad != none
3081 && (TREE_CODE (sizetree) != INTEGER_CST
3082 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3083 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3084
3085 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3086 #endif /* ARGS_GROW_DOWNWARD */
3087 }
3088
/* Round *OFFSET_PTR up to a multiple of BOUNDARY (a bit count) --
   or down to one, when arguments grow downward and offsets are
   negative.  A boundary of one byte or less needs no padding.  */

static void
pad_to_arg_alignment (offset_ptr, boundary)
     struct args_size *offset_ptr;
     int boundary;
{
  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  /* Symbolic offset: round the combined (var + constant)
	     offset as a tree expression, folding the constant in.  */
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	}
      else
	/* Constant offset: plain integer rounding suffices.  */
	offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	  FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	  CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
    }
}
3119
/* Grow *OFFSET_PTR by the padding placed below a parm: the difference
   between the parm's size rounded up to PARM_BOUNDARY bits and its
   actual size.  PASSED_MODE is the parm's machine mode; SIZETREE is
   its size in bytes, consulted only for BLKmode parms.  */

static void
pad_below (offset_ptr, passed_mode, sizetree)
     struct args_size *offset_ptr;
     enum machine_mode passed_mode;
     tree sizetree;
{
  if (passed_mode != BLKmode)
    {
      /* For a scalar mode the size is known at compile time; add the
	 rounded-up byte size minus the true byte size.  */
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
	offset_ptr->constant
	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
	      - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      /* BLKmode: the size may be symbolic, so do the same computation
	 with tree arithmetic on the args_size accumulator.  */
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
3147
3148 static tree
3149 round_down (value, divisor)
3150 tree value;
3151 int divisor;
3152 {
3153 return size_binop (MULT_EXPR,
3154 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3155 size_int (divisor));
3156 }
3157 \f
3158 /* Walk the tree of blocks describing the binding levels within a function
3159 and warn about uninitialized variables.
3160 This is done after calling flow_analysis and before global_alloc
3161 clobbers the pseudo-regs to hard regs. */
3162
void
uninitialized_vars_warning (block)
     tree block;
{
  register tree decl, sub;

  /* Check each variable declared directly in BLOCK.  */
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
	  && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' may be used uninitialized in this function");
      /* Separately, warn if the variable's register could be clobbered
	 by a longjmp past a setjmp.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' may be clobbered by `longjmp'");
    }
  /* Recurse into nested binding contours.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
3193
3194 /* Do the appropriate part of uninitialized_vars_warning
3195 but for arguments instead of local variables. */
3196
3197 void
3198 setjmp_args_warning (block)
3199 tree block;
3200 {
3201 register tree decl;
3202 for (decl = DECL_ARGUMENTS (current_function_decl);
3203 decl; decl = TREE_CHAIN (decl))
3204 if (DECL_RTL (decl) != 0
3205 && GET_CODE (DECL_RTL (decl)) == REG
3206 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3207 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'");
3208 }
3209
/* If this function calls setjmp, put all vars into the stack
   unless they were declared `register'.  */
3212
void
setjmp_protect (block)
     tree block;
{
  register tree decl, sub;

  /* Force each eligible variable of BLOCK out of a register and into
     the stack, so its value survives a longjmp.  */
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    if ((TREE_CODE (decl) == VAR_DECL
	 || TREE_CODE (decl) == PARM_DECL)
	&& DECL_RTL (decl) != 0
	&& GET_CODE (DECL_RTL (decl)) == REG
	/* If this variable came from an inline function, it must be
	   that its life doesn't overlap the setjmp.  If there was a
	   setjmp in the function, it would already be in memory.  We
	   must exclude such variable because their DECL_RTL might be
	   set to strange things such as virtual_stack_vars_rtx.  */
	&& ! DECL_FROM_INLINE (decl)
	&& (
#ifdef NON_SAVING_SETJMP
	    /* If longjmp doesn't restore the registers,
	       don't put anything in them.  */
	    NON_SAVING_SETJMP
	    ||
#endif
	    ! TREE_REGDECL (decl)))
      put_var_into_stack (decl);
  /* Recurse into nested binding contours.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    setjmp_protect (sub);
}
3241 \f
3242 /* Like the previous function, but for args instead of local variables. */
3243
3244 void
3245 setjmp_protect_args ()
3246 {
3247 register tree decl, sub;
3248 for (decl = DECL_ARGUMENTS (current_function_decl);
3249 decl; decl = TREE_CHAIN (decl))
3250 if ((TREE_CODE (decl) == VAR_DECL
3251 || TREE_CODE (decl) == PARM_DECL)
3252 && DECL_RTL (decl) != 0
3253 && GET_CODE (DECL_RTL (decl)) == REG
3254 && (
3255 /* If longjmp doesn't restore the registers,
3256 don't put anything in them. */
3257 #ifdef NON_SAVING_SETJMP
3258 NON_SAVING_SETJMP
3259 ||
3260 #endif
3261 ! TREE_REGDECL (decl)))
3262 put_var_into_stack (decl);
3263 }
3264 \f
3265 /* Return the context-pointer register corresponding to DECL,
3266 or 0 if it does not need one. */
3267
rtx
lookup_static_chain (decl)
     tree decl;
{
  tree context = decl_function_context (decl);
  tree link;

  /* DECL is at top level: no context pointer is needed.  */
  if (context == 0)
    return 0;

  /* We treat inline_function_decl as an alias for the current function
     because that is the inline function whose vars, types, etc.
     are being merged into the current function.
     See expand_inline_function.  */
  if (context == current_function_decl || context == inline_function_decl)
    return virtual_stack_vars_rtx;

  /* Search the display of enclosing-function frame pointers
     built by expand_function_start.  */
  for (link = context_display; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == context)
      return RTL_EXPR_RTL (TREE_VALUE (link));

  /* DECL's context is not on the display; this should never happen.  */
  abort ();
}
3291 \f
3292 /* Convert a stack slot address ADDR for variable VAR
3293 (from a containing function)
3294 into an address valid in this function (using a static chain). */
3295
rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  int displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  /* Find the struct function record of the containing function.  */
  for (fp = outer_function_chain; fp; fp = fp->next)
    if (fp->decl == context)
      break;

  if (fp == 0)
    abort ();

  /* Decode given address as base reg plus displacement.
     Only these two shapes are accepted; anything else aborts.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      /* NOTE(review): this inner ADDR intentionally shadows the
	 parameter; the parameter is not used again past this point.  */
      rtx addr;

      if (fp->arg_pointer_save_area == 0)
	fp->arg_pointer_save_area
	  = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);

      /* The save slot itself lives in the outer frame, so recurse
	 to make its address valid here, then load the saved ap.  */
      addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
      addr = memory_address (Pmode, addr);

      base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
3377 \f
3378 /* Return the address of the trampoline for entering nested fn FUNCTION.
3379 If necessary, allocate a trampoline (in the stack frame)
3380 and emit rtl to initialize its contents (at entry to this function). */
3381
rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      /* NOTE(review): this path returns the raw address without
	 round_trampoline_addr, unlike the two paths below -- looks
	 inconsistent when TRAMPOLINE_ALIGNMENT is defined; verify.  */
      return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
  for (fp = outer_function_chain; fp; fp = fp->next)
    for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  /* Trampoline lives in an outer function's frame; translate
	     its address into terms valid in this function.  */
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return round_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl)
    for (fp = outer_function_chain; fp; fp = fp->next)
      if (fp->decl == fn_context)
	break;

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
#else
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
  if (fp != 0)
    tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
  else
    tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  */
  if (fp != 0)
    {
      /* Allocate the list node on the defining function's obstacks so
	 it lives as long as that function's data.  */
      push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
      pop_obstacks ();
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      int momentary = suspend_momentary ();
      rtlexp = make_node (RTL_EXPR);
      resume_momentary (momentary);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  /* Translate the address into this function's terms and align it.  */
  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return round_trampoline_addr (tramp);
}
3458
3459 /* Given a trampoline address,
3460 round it to multiple of TRAMPOLINE_ALIGNMENT. */
3461
static rtx
round_trampoline_addr (tramp)
     rtx tramp;
{
#ifdef TRAMPOLINE_ALIGNMENT
  /* Round address up to desired boundary:
     tramp = (tramp + ALIGN - 1) & -ALIGN, computed with emitted insns
     since the address is generally not a compile-time constant.  */
  rtx temp = gen_reg_rtx (Pmode);
  temp = expand_binop (Pmode, add_optab, tramp,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_ALIGNMENT - 1),
		       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_binop (Pmode, and_optab, temp,
			gen_rtx (CONST_INT, VOIDmode, - TRAMPOLINE_ALIGNMENT),
			temp, 0, OPTAB_LIB_WIDEN);
#endif
  /* Without TRAMPOLINE_ALIGNMENT the address is returned unchanged.  */
  return tramp;
}
3478 \f
3479 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3480 and initialize static variables for generating RTL for the statements
3481 of the function. */
3482
void
init_function_start (subr, filename, line)
     tree subr;
     char *filename;
     int line;
{
  /* Receives the secondary result of decl_printable_name; discarded.  */
  char *junk;

  init_stmt_for_function ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* No stack slots have been made yet.  */
  stack_slot_list = 0;

  /* There is no stack slot for handling nonlocal gotos.  */
  nonlocal_goto_handler_slot = 0;
  nonlocal_goto_stack_level = 0;

  /* No labels have been declared for nonlocal use.  */
  nonlocal_labels = 0;

  /* No function calls so far in this function.  */
  function_call_count = 0;

  /* No parm regs have been allocated.
     (This is important for output_inline_function.)  */
  max_parm_reg = LAST_VIRTUAL_REGISTER + 1;

  /* Initialize the RTL mechanism.  */
  init_emit ();

  /* Initialize the queue of pending postincrement and postdecrements,
     and some other info in expr.c.  */
  init_expr ();

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  init_const_rtx_hash_table ();

  current_function_name = (*decl_printable_name) (subr, &junk);

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0);

  /* Set if a call to setjmp is seen.  */
  current_function_calls_setjmp = 0;

  /* Set if a call to longjmp is seen.  */
  current_function_calls_longjmp = 0;

  current_function_calls_alloca = 0;
  current_function_has_nonlocal_label = 0;
  current_function_contains_functions = 0;

  current_function_returns_pcc_struct = 0;
  current_function_returns_struct = 0;
  current_function_epilogue_delay_list = 0;
  current_function_uses_const_pool = 0;
  current_function_uses_pic_offset_table = 0;

  /* We have not yet needed to make a label to jump to for tail-recursion.  */
  tail_recursion_label = 0;

  /* We haven't had a need to make a save area for ap yet.  */

  arg_pointer_save_area = 0;

  /* No stack slots allocated yet.  */
  frame_offset = 0;

  /* No SAVE_EXPRs in this function yet.  */
  save_expr_regs = 0;

  /* No RTL_EXPRs in this function yet.  */
  rtl_expr_chain = 0;

  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  init_pending_stack_adjust ();
  inhibit_defer_pop = 0;

  current_function_outgoing_args_size = 0;

  /* Initialize the insn lengths.  */
  init_insn_lengths ();

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.  */
  emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (0, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      if (flag_pcc_struct_return)
	current_function_returns_pcc_struct = 1;
      else
#endif
	current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
	  || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
    warning ("function returns an aggregate");

  current_function_returns_pointer
    = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);

  /* Indicate that we need to distinguish between the return value of the
     present function and the return value of a function being called.  */
  rtx_equal_function_value_matters = 1;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;

  /* By default assume not varargs.  */
  current_function_varargs = 0;
}
3625
3626 /* Indicate that the current function uses extra args
3627 not explicitly mentioned in the argument list in any fashion. */
3628
void
mark_varargs ()
{
  /* Record that this function takes unnamed arguments; read by
     the argument-handling code elsewhere in this file.  */
  current_function_varargs = 1;
}
3634
3635 /* Expand a call to __main at the beginning of a possible main function. */
3636
void
expand_main_function ()
{
#if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
  /* On targets without an init section (or ones that request it
     explicitly), `main' must call `__main' for one-time startup work.  */
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
		     VOIDmode, 0);
#endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
}
3645 \f
3646 /* Start the RTL for a new function, and set variables used for
3647 emitting RTL.
3648 SUBR is the FUNCTION_DECL node.
3649 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
3650 the function's parameters, which must be run at any return statement. */
3651
3652 void
3653 expand_function_start (subr, parms_have_cleanups)
3654 tree subr;
3655 int parms_have_cleanups;
3656 {
3657 register int i;
3658 tree tem;
3659 rtx last_ptr;
3660
3661 /* Make sure volatile mem refs aren't considered
3662 valid operands of arithmetic insns. */
3663 init_recog_no_volatile ();
3664
3665 /* If function gets a static chain arg, store it in the stack frame.
3666 Do this first, so it gets the first stack slot offset. */
3667 if (current_function_needs_context)
3668 emit_move_insn (assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0),
3669 static_chain_incoming_rtx);
3670
3671 /* If the parameters of this function need cleaning up, get a label
3672 for the beginning of the code which executes those cleanups. This must
3673 be done before doing anything with return_label. */
3674 if (parms_have_cleanups)
3675 cleanup_label = gen_label_rtx ();
3676 else
3677 cleanup_label = 0;
3678
3679 /* Make the label for return statements to jump to, if this machine
3680 does not have a one-instruction return and uses an epilogue,
3681 or if it returns a structure, or if it has parm cleanups. */
3682 #ifdef HAVE_return
3683 if (cleanup_label == 0 && HAVE_return
3684 && ! current_function_returns_pcc_struct
3685 && ! (current_function_returns_struct && ! optimize))
3686 return_label = 0;
3687 else
3688 return_label = gen_label_rtx ();
3689 #else
3690 return_label = gen_label_rtx ();
3691 #endif
3692
3693 /* Initialize rtx used to return the value. */
3694 /* Do this before assign_parms so that we copy the struct value address
3695 before any library calls that assign parms might generate. */
3696
3697 /* Decide whether to return the value in memory or in a register. */
3698 if (aggregate_value_p (DECL_RESULT (subr)))
3699 {
3700 /* Returning something that won't go in a register. */
3701 register rtx value_address;
3702
3703 #ifdef PCC_STATIC_STRUCT_RETURN
3704 if (current_function_returns_pcc_struct)
3705 {
3706 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
3707 value_address = assemble_static_space (size);
3708 }
3709 else
3710 #endif
3711 {
3712 /* Expect to be passed the address of a place to store the value.
3713 If it is passed as an argument, assign_parms will take care of
3714 it. */
3715 if (struct_value_incoming_rtx)
3716 {
3717 value_address = gen_reg_rtx (Pmode);
3718 emit_move_insn (value_address, struct_value_incoming_rtx);
3719 }
3720 }
3721 if (value_address)
3722 DECL_RTL (DECL_RESULT (subr))
3723 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
3724 value_address);
3725 }
3726 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
3727 /* If return mode is void, this decl rtl should not be used. */
3728 DECL_RTL (DECL_RESULT (subr)) = 0;
3729 else if (parms_have_cleanups)
3730 /* If function will end with cleanup code for parms,
3731 compute the return values into a pseudo reg,
3732 which we will copy into the true return register
3733 after the cleanups are done. */
3734 DECL_RTL (DECL_RESULT (subr))
3735 = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr)));
3736 else
3737 /* Scalar, returned in a register. */
3738 {
3739 #ifdef FUNCTION_OUTGOING_VALUE
3740 DECL_RTL (DECL_RESULT (subr))
3741 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3742 #else
3743 DECL_RTL (DECL_RESULT (subr))
3744 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
3745 #endif
3746
3747 /* Mark this reg as the function's return value. */
3748 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
3749 {
3750 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
3751 /* Needed because we may need to move this to memory
3752 in case it's a named return value whose address is taken. */
3753 TREE_REGDECL (DECL_RESULT (subr)) = 1;
3754 }
3755 }
3756
3757 /* Initialize rtx for parameters and local variables.
3758 In some cases this requires emitting insns. */
3759
3760 assign_parms (subr, 0);
3761
3762 /* The following was moved from init_function_start.
3763 The move is supposed to make sdb output more accurate. */
3764 /* Indicate the beginning of the function body,
3765 as opposed to parm setup. */
3766 emit_note (0, NOTE_INSN_FUNCTION_BEG);
3767
3768 /* If doing stupid allocation, mark parms as born here. */
3769
3770 if (GET_CODE (get_last_insn ()) != NOTE)
3771 emit_note (0, NOTE_INSN_DELETED);
3772 parm_birth_insn = get_last_insn ();
3773
3774 if (obey_regdecls)
3775 {
3776 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
3777 use_variable (regno_reg_rtx[i]);
3778
3779 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
3780 use_variable (current_function_internal_arg_pointer);
3781 }
3782
3783 /* Fetch static chain values for containing functions. */
3784 tem = decl_function_context (current_function_decl);
3785 if (tem)
3786 last_ptr = copy_to_reg (static_chain_incoming_rtx);
3787 context_display = 0;
3788 while (tem)
3789 {
3790 tree rtlexp = make_node (RTL_EXPR);
3791
3792 RTL_EXPR_RTL (rtlexp) = last_ptr;
3793 context_display = tree_cons (tem, rtlexp, context_display);
3794 tem = decl_function_context (tem);
3795 if (tem == 0)
3796 break;
3797 /* Chain thru stack frames, assuming pointer to next lexical frame
3798 is found at the place we always store it. */
3799 #ifdef FRAME_GROWS_DOWNWARD
3800 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
3801 #endif
3802 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
3803 memory_address (Pmode, last_ptr)));
3804 }
3805
3806 /* After the display initializations is where the tail-recursion label
3807 should go, if we end up needing one. Ensure we have a NOTE here
3808 since some things (like trampolines) get placed before this. */
3809 tail_recursion_reentry = emit_note (0, NOTE_INSN_DELETED);
3810
3811 /* Evaluate now the sizes of any types declared among the arguments. */
3812 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
3813 expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0);
3814
3815 /* Make sure there is a line number after the function entry setup code. */
3816 force_next_line_note ();
3817 }
3818 \f
3819 /* Generate RTL for the end of the current function.
3820 FILENAME and LINE are the current position in the source file. */
3821
3822 /* It is up to language-specific callers to do cleanups for parameters. */
3823
/* Emit the RTL that ends the current function: restore the saved
   argument pointer, materialize any trampolines for nested functions,
   mark the function-end note and final line number, emit the return
   label, copy a pseudo-reg return value into the hard return register,
   expose the struct-return address where debuggers and PCC-compatible
   callers expect it, and finally emit a `return' insn if the target
   has one.

   FILENAME and LINE are the source position of the end of the
   function; they are force-emitted as a line note because SDB
   requires a line number at the end of every function (see below).

   Callers outside this file invoke this once per function, after the
   body has been expanded; language-specific callers are responsible
   for parameter cleanups (per the comment preceding this function).  */

void
expand_function_end (filename, line)
     char *filename;
     int line;
{
  register int i;
  tree link;

  /* Template for the trampoline contents.  It is built at most once
     per compilation (hence `static') in permanent storage, and each
     trampoline instance below is block-copied from it.  */
  static rtx initial_trampoline;

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      setjmp_protect (DECL_INITIAL (current_function_decl));
      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.
     The store is emitted back at function entry (just before
     TAIL_RECURSION_REENTRY) so the saved value is the original
     incoming one, captured before the body could clobber it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  Each entry
     on TRAMPOLINE_LIST pairs a nested FUNCTION_DECL with the RTL for
     its trampoline's stack slot.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  /* Build the template in permanent (non-temporary) storage so
	     it survives across functions.  */
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}

      /* Generate insns to initialize the trampoline: round its
	 address to the target's required boundary, copy the template
	 into it, then patch in the nested function's address and its
	 static-chain value via the target macro.  */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
			      round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline,
		       gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_SIZE),
		       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
			     XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
  /* Clear the trampoline_list for the next function.  */
  trampoline_list = 0;

#if 0 /* I think unused parms are legitimate enough.  */
  /* Warn about unused parms.  */
  if (warn_unused)
    {
      rtx decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
	  warning_with_decl (decl, "unused parameter `%s'");
    }
#endif

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  /* NOTE(review): end_sequence is called with no arguments at the
     trampoline site above but with `0' here; presumably harmless under
     old-style (unprototyped) calling conventions — confirm against the
     declaration in emit-rtl.c.  */
  while (in_sequence_p ())
    end_sequence (0);

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  The USEs keep the parm and
     SAVE_EXPR pseudos live through the whole body when no real
     register allocation pass will compute lifetimes.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  /* Flush out any pending stack adjustment rather than carrying it
     past the end of the body.  */
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (0, NOTE_INSN_FUNCTION_END);

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.
     The save is placed back at function entry (after PARM_BIRTH_INSN)
     so TEM holds the pre-alloca stack pointer; restoring it here
     discards all alloca'd storage on exit.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = gen_reg_rtx (Pmode);
	emit_insn_after (gen_rtx (SET, VOIDmode, tem, stack_pointer_rtx),
			 parm_birth_insn);
	emit_insn (gen_rtx (SET, VOIDmode, stack_pointer_rtx, tem));
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

      /* Some targets return values in a different place than where
	 callers find them; prefer the outgoing location if defined.  */
#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      /* The USE keeps the hard return register live up to the point of
	 return so nothing deletes or clobbers the copy just emitted.  */
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  fixup_gotos (0, 0, 0, get_insns (), 0);
}
This page took 7.092863 seconds and 6 git commands to generate.