/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91, 92, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
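/* Added commentary (not in the original source): with a power-of-two
   alignment these macros reduce to simple masking, e.g.

	FLOOR_ROUND (-13, 8) == -16	CEIL_ROUND (-13, 8) == -8
	FLOOR_ROUND (16, 8)  == 16	CEIL_ROUND (13, 8)  == 16

   Masking matters because division of a negative dividend was
   implementation-defined in traditional C, so (-13 / 8) * 8 might yield
   -8 where FLOOR_ROUND must yield -16.  */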

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments required to be pushed by the prologue.
   If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
   and no stack adjusts will be done on function calls.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0 /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;

void fixup_gotos ();

static tree round_down ();
static rtx round_trampoline_addr ();
static rtx fixup_stack_1 ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static void fixup_var_refs_1 ();
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
static void instantiate_decl ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  int size;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context ()
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context ()
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}
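/* Added commentary (not in the original source): a front end that meets a
   nested function definition part-way through its containing function is
   expected to bracket the nested compilation roughly as

	push_function_context ();	(plus any language-specific save)
	... generate rtl for the nested function ...
	pop_function_context ();	(plus any language-specific restore)

   Note that pop_function_context sets current_function_contains_functions
   itself, so the containing function need not record that flag.  */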
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
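/* Added commentary (not in the original source): a typical call for a
   4-byte local whose alignment follows from its mode would be

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Before virtual register instantiation the result is a
   (mem:SI (plus (reg virtual-stack-vars) (const_int OFFSET))), with OFFSET
   standing for the frame offset chosen above; once virtuals_instantiated
   is set, new slots are addressed directly from frame_pointer_rtx.  */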

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
#if BYTES_BIG_ENDIAN
  if (mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);
#endif

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is non-zero if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated with this
   flag.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
	  && (best_p == 0 || best_p->size > p->size))
	best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    p = best_p;

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      p->size = size;
      /* If the temp slot mode doesn't indicate the alignment,
	 use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->level = temp_slot_level;
  p->keep = keep;
  return p->slot;
}
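/* Added commentary (not in the original source): expanding a statement
   that needs scratch stack space typically looks like

	rtx temp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
	... emit insns that use TEMP ...
	free_temp_slots ();

   after which the slot can be reused by a later statement, since
   free_temp_slots merely clears in_use on non-kept slots rather than
   releasing the stack space.  */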
\f
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher.  If X matched one of our slots,
   just mark that one.  Otherwise, we can't easily predict which it is,
   so upgrade all of them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)
      {
	p->level--;
	return;
      }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->in_use = 0;
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  /* For GNU C++, we must allow a sequence to be emitted anywhere in
     the level where the sequence was started.  By not changing levels
     when the compiler is inside a sequence, the temporaries for the
     sequence will not unwittingly conflict with the temporaries for
     other sequences and/or code at that level.  */
  if (in_sequence_p ())
    return;

  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  /* See comment in push_temp_slots about why we don't change levels
     in sequences.  */
  if (in_sequence_p ())
    return;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level)
      p->in_use = 0;

  temp_slot_level--;
}
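/* Added commentary (not in the original source): callers that open a new
   binding contour are expected to pair these, as in

	push_temp_slots ();
	... expand statements, allocating temporaries ...
	pop_temp_slots ();

   Both calls are deliberate no-ops inside a sequence (see the comment in
   push_temp_slots), so temporaries emitted into a pending sequence stay at
   the level where the sequence was started.  */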
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  register rtx new = 0;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  if (GET_CODE (reg) != REG)
    return;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
	new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
	new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);
      temp
	= (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl));
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl)));
}
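/* Added commentary (not in the original source): note that the code above
   deliberately smashes the existing REG rtx into a MEM in place
   (PUT_CODE/PUT_MODE) rather than allocating a fresh rtx.  Because rtl
   sharing makes every insn that mentioned the pseudo point at this one
   object, all references become memory references at once; fixup_var_refs
   then only has to repair insns that are no longer valid with a MEM
   operand, not locate every reference.  */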
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
	  end_sequence ();
	}
    }
}
\f
/* This structure is used by the following two functions to record MEMs or
   pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
   VAR as an address.  We need to maintain this list in case two operands of
   an insn were required to match; in that case we must ensure we use the
   same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* REPLACEMENTS is a pointer to a list of the above structures and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* The insn to load VAR from a home in the arglist
	     is now a no-op.  When we see it, just delete it.  */
	  if (toplevel
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == var
	      /* If this represents the result of an insn group,
		 don't delete the insn.  */
	      && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
	      && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
	    {
	      /* In unoptimized compilation, we shouldn't call delete_insn
		 except in jump.c doing warnings.  */
	      PUT_CODE (insn, NOTE);
	      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
	      NOTE_SOURCE_FILE (insn) = 0;
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next);
	    }
	  else
	    {
	      struct fixup_replacement *replacements = 0;
	      rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
	      /* If the insn that copies the results of a CALL_INSN
		 into a pseudo now references VAR, we have to use an
		 intermediate pseudo since we want the life of the
		 return value register to be only a single insn.

		 If we don't use an intermediate pseudo, such things as
		 address computations to make the address of VAR valid
		 if it is not can be placed between the CALL_INSN and INSN.

		 To make sure this doesn't happen, we record the destination
		 of the CALL_INSN and see if the next insn uses both that
		 and VAR.  */

	      if (call_dest != 0 && GET_CODE (insn) == INSN
		  && reg_mentioned_p (var, PATTERN (insn))
		  && reg_mentioned_p (call_dest, PATTERN (insn)))
		{
		  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

		  emit_insn_before (gen_move_insn (temp, call_dest), insn);

		  PATTERN (insn) = replace_rtx (PATTERN (insn),
						call_dest, temp);
		}

	      if (GET_CODE (insn) == CALL_INSN
		  && GET_CODE (PATTERN (insn)) == SET)
		call_dest = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (insn) == CALL_INSN
		       && GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		call_dest = 0;
#endif

	      /* See if we have to do anything to INSN now that VAR is in
		 memory.  If it needs to be loaded into a pseudo, use a single
		 pseudo for the entire insn in case there is a MATCH_DUP
		 between two operands.  We pass a pointer to the head of
		 a list of struct fixup_replacements.  If fixup_var_refs_1
		 needs to allocate pseudos or replacement MEMs (for SUBREGs),
		 it will record them in this list.

		 If it allocated a pseudo for any replacement, we copy into
		 it here.  */

	      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
				&replacements);

	      /* If this is last_parm_insn, and any instructions were output
		 after it to fix it up, then we must set last_parm_insn to
		 the last such instruction emitted.  */
	      if (insn == last_parm_insn)
		last_parm_insn = PREV_INSN (next_insn);

	      while (replacements)
		{
		  if (GET_CODE (replacements->new) == REG)
		    {
		      rtx insert_before;
		      rtx seq;

		      /* OLD might be a (subreg (mem)).  */
		      if (GET_CODE (replacements->old) == SUBREG)
			replacements->old
			  = fixup_memory_subreg (replacements->old, insn, 0);
		      else
			replacements->old
			  = fixup_stack_1 (replacements->old, insn);

		      /* We can not separate USE insns from the CALL_INSN
			 that they belong to.  If this is a CALL_INSN, insert
			 the move insn before the USE insns preceding it
			 instead of immediately before the insn.  */
		      if (GET_CODE (insn) == CALL_INSN)
			{
			  insert_before = insn;
			  while (GET_CODE (PREV_INSN (insert_before)) == INSN
				 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
			    insert_before = PREV_INSN (insert_before);
			}
		      else
			insert_before = insn;

		      /* If we are changing the mode, do a conversion.
			 This might be wasteful, but combine.c will
			 eliminate much of the waste.  */

		      if (GET_MODE (replacements->new)
			  != GET_MODE (replacements->old))
			{
			  start_sequence ();
			  convert_move (replacements->new,
					replacements->old, unsignedp);
			  seq = gen_sequence ();
			  end_sequence ();
			}
		      else
			seq = gen_move_insn (replacements->new,
					     replacements->old);

		      emit_insn_before (seq, insert_before);
		    }

		  replacements = replacements->next;
		}
	    }

	  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
	     But don't touch other insns referred to by reg-notes;
	     we will get them elsewhere.  */
	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (GET_CODE (note) != INSN_LIST)
	      XEXP (note, 0)
		= walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
	}
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, var);

	  *loc = x = replacement->new;
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    tem = fixup_memory_subreg (tem, insn, 1);
	  tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      int width = INTVAL (XEXP (x, 1));
	      int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	      if (GET_CODE (x) == ZERO_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
	      if (GET_CODE (x) == SIGN_EXTRACT)
		wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  int offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		  offset = (GET_MODE_SIZE (is_mode)
			    - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = gen_rtx (MEM, wanted_mode,
				    plus_constant (XEXP (tem, 0), offset));
		  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (GET_MODE (var));
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, NULL_PTR);

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = XEXP (src, 0);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

#ifdef HAVE_insv
	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, 1);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode
		  = insn_operand_mode[(int) CODE_FOR_insv][0];
		enum machine_mode is_mode = GET_MODE (tem);
		int width = INTVAL (XEXP (outerdest, 1));
		int pos = INTVAL (XEXP (outerdest, 2));

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    int offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);
#endif

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = gen_rtx (MEM, wanted_mode,
				      plus_constant (XEXP (tem, 0), offset));
		    RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
		    MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
		    MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }
#endif

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && x == single_set (PATTERN (insn)))
	  {
	    rtx pat;

	    replacement = find_fixup_replacement (replacements, SET_SRC (x));
	    if (replacement->new)
	      SET_SRC (x) = replacement->new;
	    else if (GET_CODE (SET_SRC (x)) == SUBREG)
	      SET_SRC (x) = replacement->new
		= fixup_memory_subreg (SET_SRC (x), insn, 0);
	    else
	      SET_SRC (x) = replacement->new
		= fixup_stack_1 (SET_SRC (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function than to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && x == single_set (PATTERN (insn)))
	  {
	    rtx pat;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (GET_CODE (pat) == SEQUENCE)
	      {
		emit_insn_after (pat, insn);
		PUT_CODE (insn, NOTE);
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	    else
	      PATTERN (insn) = pat;

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
	    else
	      fixeddest = fixup_stack_1 (fixeddest, insn);

	    temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
				? GET_MODE (fixeddest)
				: GET_MODE (SET_SRC (x)));

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements);
	}
    }
}
\f
/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
   return an rtx (MEM:m1 newaddr) which is equivalent.
   If any insns must be emitted to compute NEWADDR, put them before INSN.

   UNCRITICAL nonzero means accept paradoxical subregs.
   This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES.  */

static rtx
fixup_memory_subreg (x, insn, uncritical)
     rtx x;
     rtx insn;
     int uncritical;
{
  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
  rtx addr = XEXP (SUBREG_REG (x), 0);
  enum machine_mode mode = GET_MODE (x);
  rtx saved, result;

  /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
      && ! uncritical)
    abort ();

#if BYTES_BIG_ENDIAN
  offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif
  addr = plus_constant (addr, offset);
  if (!flag_force_addr && memory_address_p (mode, addr))
    /* Shortcut if no insns need be emitted.  */
    return change_address (SUBREG_REG (x), mode, addr);
  start_sequence ();
  result = change_address (SUBREG_REG (x), mode, addr);
  emit_insn_before (gen_sequence (), insn);
  end_sequence ();
  return result;
}
1685
1686 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1687 Replace subexpressions of X in place.
1688 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1689 Otherwise return X, with its contents possibly altered.
1690
1691 If any insns must be emitted to compute NEWADDR, put them before INSN.
1692
1693 UNCRITICAL is as in fixup_memory_subreg. */
1694
1695 static rtx
1696 walk_fixup_memory_subreg (x, insn, uncritical)
1697 register rtx x;
1698 rtx insn;
1699 int uncritical;
1700 {
1701 register enum rtx_code code;
1702 register char *fmt;
1703 register int i;
1704
1705 if (x == 0)
1706 return 0;
1707
1708 code = GET_CODE (x);
1709
1710 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1711 return fixup_memory_subreg (x, insn, uncritical);
1712
1713 /* Nothing special about this RTX; fix its operands. */
1714
1715 fmt = GET_RTX_FORMAT (code);
1716 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1717 {
1718 if (fmt[i] == 'e')
1719 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
1720 if (fmt[i] == 'E')
1721 {
1722 register int j;
1723 for (j = 0; j < XVECLEN (x, i); j++)
1724 XVECEXP (x, i, j)
1725 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
1726 }
1727 }
1728 return x;
1729 }
1730 \f
1731 #if 0
1732 /* Fix up any references to stack slots that are invalid memory addresses
1733 because they exceed the maximum range of a displacement. */
1734
1735 void
1736 fixup_stack_slots ()
1737 {
1738 register rtx insn;
1739
1740 /* Did we generate a stack slot that is out of range
1741 or otherwise has an invalid address? */
1742 if (invalid_stack_slot)
1743 {
1744 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
1745 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1746 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1747 || GET_CODE (insn) == JUMP_INSN)
1748 fixup_stack_1 (PATTERN (insn), insn);
1749 }
1750 }
1751 #endif
1752
1753 /* For each memory ref within X, if it refers to a stack slot
1754 with an out of range displacement, put the address in a temp register
1755 (emitting new insns before INSN to load these registers)
1756 and alter the memory ref to use that register.
1757 Replace each such MEM rtx with a copy, to avoid clobberage. */
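/* Illustrative example (not from the original source): on a target whose
   displacements are limited to 16 bits, the reference
     (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 70000)))
   is handled by loading the address into a fresh pseudo before INSN,
     (set (reg:SI 90) (plus:SI (reg virtual-stack-vars) (const_int 70000)))
   and returning a copy of the MEM, (mem:SI (reg:SI 90)).  */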
1758
1759 static rtx
1760 fixup_stack_1 (x, insn)
1761 rtx x;
1762 rtx insn;
1763 {
1764 register int i;
1765 register RTX_CODE code = GET_CODE (x);
1766 register char *fmt;
1767
1768 if (code == MEM)
1769 {
1770 register rtx ad = XEXP (x, 0);
1771 /* If we have the address of a stack slot but it's not valid
1772 (displacement is too large), compute the sum in a register. */
1773 if (GET_CODE (ad) == PLUS
1774 && GET_CODE (XEXP (ad, 0)) == REG
1775 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1776 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
1777 || XEXP (ad, 0) == current_function_internal_arg_pointer)
1778 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1779 {
1780 rtx temp, seq;
1781 if (memory_address_p (GET_MODE (x), ad))
1782 return x;
1783
1784 start_sequence ();
1785 temp = copy_to_reg (ad);
1786 seq = gen_sequence ();
1787 end_sequence ();
1788 emit_insn_before (seq, insn);
1789 return change_address (x, VOIDmode, temp);
1790 }
1791 return x;
1792 }
1793
1794 fmt = GET_RTX_FORMAT (code);
1795 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1796 {
1797 if (fmt[i] == 'e')
1798 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1799 if (fmt[i] == 'E')
1800 {
1801 register int j;
1802 for (j = 0; j < XVECLEN (x, i); j++)
1803 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
1804 }
1805 }
1806 return x;
1807 }
1808 \f
1809 /* Optimization: a bit-field instruction whose field
1810 happens to be a byte or halfword in memory
1811 can be changed to a move instruction.
1812
1813 We call here when INSN is an insn to examine or store into a bit-field.
1814 BODY is the SET-rtx to be altered.
1815
1816 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
1817 (Currently this is called only from function.c, and EQUIV_MEM
1818 is always 0.) */
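/* Illustrative example (not from the original source): on a little-endian
   target, the bit-field insertion
     (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
          (reg:SI 60))
   references a naturally aligned byte, so it can become the simple move
     (set (mem:QI (plus addr (const_int 1))) (subreg:QI (reg:SI 60) 0)).  */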
1819
1820 static void
1821 optimize_bit_field (body, insn, equiv_mem)
1822 rtx body;
1823 rtx insn;
1824 rtx *equiv_mem;
1825 {
1826 register rtx bitfield;
1827 int destflag;
1828 rtx seq = 0;
1829 enum machine_mode mode;
1830
1831 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
1832 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
1833 bitfield = SET_DEST (body), destflag = 1;
1834 else
1835 bitfield = SET_SRC (body), destflag = 0;
1836
1837 /* First check that the field being stored has constant size and position
1838 and is in fact a byte or halfword suitably aligned. */
1839
1840 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
1841 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
1842 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
1843 != BLKmode)
1844 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
1845 {
1846 register rtx memref = 0;
1847
1848 /* Now check that the containing word is memory, not a register,
1849 and that it is safe to change the machine mode. */
1850
1851 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
1852 memref = XEXP (bitfield, 0);
1853 else if (GET_CODE (XEXP (bitfield, 0)) == REG
1854 && equiv_mem != 0)
1855 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
1856 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1857 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
1858 memref = SUBREG_REG (XEXP (bitfield, 0));
1859 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
1860 && equiv_mem != 0
1861 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
1862 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
1863
1864 if (memref
1865 && ! mode_dependent_address_p (XEXP (memref, 0))
1866 && ! MEM_VOLATILE_P (memref))
1867 {
1868 /* Now adjust the address, first for any subreg'ing
1869 that we are now getting rid of,
1870 and then for which byte of the word is wanted. */
1871
1872 register int offset = INTVAL (XEXP (bitfield, 2));
1873 /* Adjust OFFSET to count bits from low-address byte. */
1874 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
1875 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
1876 - offset - INTVAL (XEXP (bitfield, 1)));
1877 #endif
1878 /* Adjust OFFSET to count bytes from low-address byte. */
1879 offset /= BITS_PER_UNIT;
1880 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
1881 {
1882 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
1883 #if BYTES_BIG_ENDIAN
1884 offset -= (MIN (UNITS_PER_WORD,
1885 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
1886 - MIN (UNITS_PER_WORD,
1887 GET_MODE_SIZE (GET_MODE (memref))));
1888 #endif
1889 }
1890
1891 memref = change_address (memref, mode,
1892 plus_constant (XEXP (memref, 0), offset));
1893
1894 /* Store this memory reference where
1895 we found the bit field reference. */
1896
1897 if (destflag)
1898 {
1899 validate_change (insn, &SET_DEST (body), memref, 1);
1900 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
1901 {
1902 rtx src = SET_SRC (body);
1903 while (GET_CODE (src) == SUBREG
1904 && SUBREG_WORD (src) == 0)
1905 src = SUBREG_REG (src);
1906 if (GET_MODE (src) != GET_MODE (memref))
1907 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
1908 validate_change (insn, &SET_SRC (body), src, 1);
1909 }
1910 else if (GET_MODE (SET_SRC (body)) != VOIDmode
1911 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
1912 /* This shouldn't happen because anything that didn't have
1913 one of these modes should have been converted explicitly
1914 and then referenced through a subreg.
1915 This is so because the original bit-field was
1916 handled by agg_mode and so its tree structure had
1917 the same mode that memref now has. */
1918 abort ();
1919 }
1920 else
1921 {
1922 rtx dest = SET_DEST (body);
1923
1924 while (GET_CODE (dest) == SUBREG
1925 && SUBREG_WORD (dest) == 0)
1926 dest = SUBREG_REG (dest);
1927
1928 validate_change (insn, &SET_DEST (body), dest, 1);
1929
1930 if (GET_MODE (dest) == GET_MODE (memref))
1931 validate_change (insn, &SET_SRC (body), memref, 1);
1932 else
1933 {
1934 /* Convert the mem ref to the destination mode. */
1935 rtx newreg = gen_reg_rtx (GET_MODE (dest));
1936
1937 start_sequence ();
1938 convert_move (newreg, memref,
1939 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
1940 seq = get_insns ();
1941 end_sequence ();
1942
1943 validate_change (insn, &SET_SRC (body), newreg, 1);
1944 }
1945 }
1946
1947 /* See if we can convert this extraction or insertion into
1948 a simple move insn. We might not be able to do so if this
1949 was, for example, part of a PARALLEL.
1950
1951 If we succeed, write out any needed conversions. If we fail,
1952 it is hard to guess why we failed, so don't do anything
1953 special; just let the optimization be suppressed. */
1954
1955 if (apply_change_group () && seq)
1956 emit_insns_before (seq, insn);
1957 }
1958 }
1959 }
1960 \f
1961 /* These routines are responsible for converting virtual register references
1962 to the actual hard register references once RTL generation is complete.
1963
1964 The following four variables are used for communication between the
1965 routines. They contain the offsets of the virtual registers from their
1966 respective hard registers. */
1967
1968 static int in_arg_offset;
1969 static int var_offset;
1970 static int dynamic_offset;
1971 static int out_arg_offset;
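/* Illustrative example (not from the original source): if
   FIRST_PARM_OFFSET (fndecl) is 8 on some target, then in_arg_offset is 8
   and a use of
     (plus:SI (reg virtual-incoming-args) (const_int 4))
   is eventually instantiated as
     (plus:SI (reg arg-pointer) (const_int 12)).  */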
1972
1973 /* On most machines, the stack pointer register is equivalent to the bottom
1974 of the stack. */
1975
1976 #ifndef STACK_POINTER_OFFSET
1977 #define STACK_POINTER_OFFSET 0
1978 #endif
1979
1980 /* If not defined, pick an appropriate default for the offset of dynamically
1981 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1982 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1983
1984 #ifndef STACK_DYNAMIC_OFFSET
1985
1986 #ifdef ACCUMULATE_OUTGOING_ARGS
1987 /* The bottom of the stack points to the actual arguments. If
1988 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1989 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1990 stack space for register parameters is not pushed by the caller, but
1991 rather part of the fixed stack areas and hence not included in
1992 `current_function_outgoing_args_size'. Nevertheless, we must allow
1993 for it when allocating stack dynamic objects. */
1994
1995 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1996 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1997 (current_function_outgoing_args_size \
1998 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
1999
2000 #else
2001 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2002 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2003 #endif
2004
2005 #else
2006 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2007 #endif
2008 #endif
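/* Illustrative example (not from the original source): with
   ACCUMULATE_OUTGOING_ARGS, 24 bytes of outgoing arguments, a
   REG_PARM_STACK_SPACE of 16 and no OUTGOING_REG_PARM_STACK_SPACE, the
   default above yields 24 + 16 + STACK_POINTER_OFFSET.  */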
2009
2010 /* Pass through the INSNS of function FNDECL and convert virtual register
2011 references to hard register references. */
2012
2013 void
2014 instantiate_virtual_regs (fndecl, insns)
2015 tree fndecl;
2016 rtx insns;
2017 {
2018 rtx insn;
2019
2020 /* Compute the offsets to use for this function. */
2021 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2022 var_offset = STARTING_FRAME_OFFSET;
2023 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2024 out_arg_offset = STACK_POINTER_OFFSET;
2025
2026 /* Scan all variables and parameters of this function. For each that is
2027 in memory, instantiate all virtual registers if the result is a valid
2028 address. If not, we do it later. That will handle most uses of virtual
2029 regs on many machines. */
2030 instantiate_decls (fndecl, 1);
2031
2032 /* Initialize recognition, indicating that volatile is OK. */
2033 init_recog ();
2034
2035 /* Scan through all the insns, instantiating every virtual register still
2036 present. */
2037 for (insn = insns; insn; insn = NEXT_INSN (insn))
2038 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2039 || GET_CODE (insn) == CALL_INSN)
2040 {
2041 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2042 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2043 }
2044
2045 /* Now instantiate the remaining register equivalences for debugging info.
2046 These will not be valid addresses. */
2047 instantiate_decls (fndecl, 0);
2048
2049 /* Indicate that, from now on, assign_stack_local should use
2050 frame_pointer_rtx. */
2051 virtuals_instantiated = 1;
2052 }
2053
2054 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2055 all virtual registers in their DECL_RTL's.
2056
2057 If VALID_ONLY, do this only if the resulting address is still valid.
2058 Otherwise, always do it. */
2059
2060 static void
2061 instantiate_decls (fndecl, valid_only)
2062 tree fndecl;
2063 int valid_only;
2064 {
2065 tree decl;
2066
2067 if (DECL_INLINE (fndecl))
2068 /* When compiling an inline function, the obstack used for
2069 rtl allocation is the maybepermanent_obstack. Calling
2070 `resume_temporary_allocation' switches us back to that
2071 obstack while we process this function's parameters. */
2072 resume_temporary_allocation ();
2073
2074 /* Process all parameters of the function. */
2075 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2076 {
2077 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2078 valid_only);
2079 instantiate_decl (DECL_INCOMING_RTL (decl),
2080 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2081 }
2082
2083 /* Now process all variables defined in the function or its subblocks. */
2084 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2085
2086 if (DECL_INLINE (fndecl))
2087 {
2088 /* Save all rtl allocated for this function by raising the
2089 high-water mark on the maybepermanent_obstack. */
2090 preserve_data ();
2091 /* All further rtl allocation is now done in the current_obstack. */
2092 rtl_in_current_obstack ();
2093 }
2094 }
2095
2096 /* Subroutine of instantiate_decls: Process all decls in the given
2097 BLOCK node and all its subblocks. */
2098
2099 static void
2100 instantiate_decls_1 (let, valid_only)
2101 tree let;
2102 int valid_only;
2103 {
2104 tree t;
2105
2106 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2107 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2108 valid_only);
2109
2110 /* Process all subblocks. */
2111 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2112 instantiate_decls_1 (t, valid_only);
2113 }
2114
2115 /* Subroutine of the preceding procedures: Given RTL representing a
2116 decl and the size of the object, do any instantiation required.
2117
2118 If VALID_ONLY is non-zero, it means that the RTL should only be
2119 changed if the new address is valid. */
2120
2121 static void
2122 instantiate_decl (x, size, valid_only)
2123 rtx x;
2124 int size;
2125 int valid_only;
2126 {
2127 enum machine_mode mode;
2128 rtx addr;
2129
2130 /* If this is not a MEM, no need to do anything. Similarly if the
2131 address is a constant or a register that is not a virtual register. */
2132
2133 if (x == 0 || GET_CODE (x) != MEM)
2134 return;
2135
2136 addr = XEXP (x, 0);
2137 if (CONSTANT_P (addr)
2138 || (GET_CODE (addr) == REG
2139 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2140 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2141 return;
2142
2143 /* If we should only do this if the address is valid, copy the address.
2144 We need to do this so we can undo any changes that might make the
2145 address invalid. This copy is unfortunate, but probably can't be
2146 avoided. */
2147
2148 if (valid_only)
2149 addr = copy_rtx (addr);
2150
2151 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2152
2153 if (! valid_only)
2154 return;
2155
2156 /* Now verify that the resulting address is valid for every integer or
2157 floating-point mode up to and including SIZE bytes long. We do this
2158 since the object might be accessed in any mode and frame addresses
2159 are shared. */
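  /* (Illustrative: for an 8-byte object this typically checks QImode,
     HImode, SImode and DImode, then SFmode and DFmode, assuming the
     usual mode sizes for the target.)  */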
2160
2161 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2162 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2163 mode = GET_MODE_WIDER_MODE (mode))
2164 if (! memory_address_p (mode, addr))
2165 return;
2166
2167 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2168 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2169 mode = GET_MODE_WIDER_MODE (mode))
2170 if (! memory_address_p (mode, addr))
2171 return;
2172
2173 /* Otherwise, put back the address, now that we have updated it and we
2174 know it is valid. */
2175
2176 XEXP (x, 0) = addr;
2177 }
2178 \f
2179 /* Given a pointer to a piece of rtx and an optional pointer to the
2180 containing object, instantiate any virtual registers present in it.
2181
2182 If EXTRA_INSNS, we always do the replacement and generate
2183 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2184 is not valid.
2185
2186 Return 1 if we either had nothing to do or if we were able to do the
2187 needed replacement. Return 0 otherwise; we only return zero if
2188 EXTRA_INSNS is zero.
2189
2190 We first try some simple transformations to avoid the creation of extra
2191 pseudos. */
2192
2193 static int
2194 instantiate_virtual_regs_1 (loc, object, extra_insns)
2195 rtx *loc;
2196 rtx object;
2197 int extra_insns;
2198 {
2199 rtx x;
2200 RTX_CODE code;
2201 rtx new = 0;
2202 int offset;
2203 rtx temp;
2204 rtx seq;
2205 int i, j;
2206 char *fmt;
2207
2208 /* Re-start here to avoid recursion in common cases. */
2209 restart:
2210
2211 x = *loc;
2212 if (x == 0)
2213 return 1;
2214
2215 code = GET_CODE (x);
2216
2217 /* Check for some special cases. */
2218 switch (code)
2219 {
2220 case CONST_INT:
2221 case CONST_DOUBLE:
2222 case CONST:
2223 case SYMBOL_REF:
2224 case CODE_LABEL:
2225 case PC:
2226 case CC0:
2227 case ASM_INPUT:
2228 case ADDR_VEC:
2229 case ADDR_DIFF_VEC:
2230 case RETURN:
2231 return 1;
2232
2233 case SET:
2234 /* We are allowed to set the virtual registers. This means that
2235 the actual register should receive the source minus the
2236 appropriate offset. This is used, for example, in the handling
2237 of non-local gotos. */
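      /* Illustrative example (not from the original source): restoring the
         stack level for a nonlocal goto,
           (set (reg virtual-stack-dynamic) (reg:SI 65))
         becomes (roughly)
           (set (reg stack-pointer) (reg:SI 66))
         preceded by an insn computing
           (set (reg:SI 66) (plus:SI (reg:SI 65) (const_int -dynamic_offset))).
         The register numbers here are hypothetical.  */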
2238 if (SET_DEST (x) == virtual_incoming_args_rtx)
2239 new = arg_pointer_rtx, offset = - in_arg_offset;
2240 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2241 new = frame_pointer_rtx, offset = - var_offset;
2242 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2243 new = stack_pointer_rtx, offset = - dynamic_offset;
2244 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2245 new = stack_pointer_rtx, offset = - out_arg_offset;
2246
2247 if (new)
2248 {
2249 /* The only valid sources here are PLUS or REG. Just do
2250 the simplest possible thing to handle them. */
2251 if (GET_CODE (SET_SRC (x)) != REG
2252 && GET_CODE (SET_SRC (x)) != PLUS)
2253 abort ();
2254
2255 start_sequence ();
2256 if (GET_CODE (SET_SRC (x)) != REG)
2257 temp = force_operand (SET_SRC (x), NULL_RTX);
2258 else
2259 temp = SET_SRC (x);
2260 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2261 seq = get_insns ();
2262 end_sequence ();
2263
2264 emit_insns_before (seq, object);
2265 SET_DEST (x) = new;
2266
2267 if (!validate_change (object, &SET_SRC (x), temp, 0)
2268 || ! extra_insns)
2269 abort ();
2270
2271 return 1;
2272 }
2273
2274 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2275 loc = &SET_SRC (x);
2276 goto restart;
2277
2278 case PLUS:
2279 /* Handle special case of virtual register plus constant. */
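      /* Illustrative example (not from the original source):
           (plus:SI (reg virtual-stack-vars) (const_int 4))
         becomes
           (plus:SI (reg frame-pointer) (const_int 4 + var_offset)),
         the two constants being folded by plus_constant below; if the
         folded constant is zero, the PLUS collapses to the bare
         frame pointer.  */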
2280 if (CONSTANT_P (XEXP (x, 1)))
2281 {
2282 rtx old;
2283
2284 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2285 if (GET_CODE (XEXP (x, 0)) == PLUS)
2286 {
2287 rtx inner = XEXP (XEXP (x, 0), 0);
2288
2289 if (inner == virtual_incoming_args_rtx)
2290 new = arg_pointer_rtx, offset = in_arg_offset;
2291 else if (inner == virtual_stack_vars_rtx)
2292 new = frame_pointer_rtx, offset = var_offset;
2293 else if (inner == virtual_stack_dynamic_rtx)
2294 new = stack_pointer_rtx, offset = dynamic_offset;
2295 else if (inner == virtual_outgoing_args_rtx)
2296 new = stack_pointer_rtx, offset = out_arg_offset;
2297 else
2298 {
2299 loc = &XEXP (x, 0);
2300 goto restart;
2301 }
2302
2303 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2304 extra_insns);
2305 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2306 }
2307
2308 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2309 new = arg_pointer_rtx, offset = in_arg_offset;
2310 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2311 new = frame_pointer_rtx, offset = var_offset;
2312 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2313 new = stack_pointer_rtx, offset = dynamic_offset;
2314 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2315 new = stack_pointer_rtx, offset = out_arg_offset;
2316 else
2317 {
2318 /* We know the second operand is a constant. Unless the
2319 first operand is a REG (which has already been checked),
2320 it needs to be checked. */
2321 if (GET_CODE (XEXP (x, 0)) != REG)
2322 {
2323 loc = &XEXP (x, 0);
2324 goto restart;
2325 }
2326 return 1;
2327 }
2328
2329 old = XEXP (x, 0);
2330 XEXP (x, 0) = new;
2331 new = plus_constant (XEXP (x, 1), offset);
2332
2333 /* If the new constant is zero, try to replace the sum with its
2334 first operand. */
2335 if (new == const0_rtx
2336 && validate_change (object, loc, XEXP (x, 0), 0))
2337 return 1;
2338
2339 /* Next try to replace constant with new one. */
2340 if (!validate_change (object, &XEXP (x, 1), new, 0))
2341 {
2342 if (! extra_insns)
2343 {
2344 XEXP (x, 0) = old;
2345 return 0;
2346 }
2347
2348 /* Otherwise copy the new constant into a register and replace
2349 constant with that register. */
2350 temp = gen_reg_rtx (Pmode);
2351 if (validate_change (object, &XEXP (x, 1), temp, 0))
2352 emit_insn_before (gen_move_insn (temp, new), object);
2353 else
2354 {
2355 /* If that didn't work, replace this expression with a
2356 register containing the sum. */
2357
2358 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2359 XEXP (x, 0) = old;
2360
2361 start_sequence ();
2362 temp = force_operand (new, NULL_RTX);
2363 seq = get_insns ();
2364 end_sequence ();
2365
2366 emit_insns_before (seq, object);
2367 if (! validate_change (object, loc, temp, 0)
2368 && ! validate_replace_rtx (x, temp, object))
2369 abort ();
2370 }
2371 }
2372
2373 return 1;
2374 }
2375
2376 /* Fall through to generic two-operand expression case. */
2377 case EXPR_LIST:
2378 case CALL:
2379 case COMPARE:
2380 case MINUS:
2381 case MULT:
2382 case DIV: case UDIV:
2383 case MOD: case UMOD:
2384 case AND: case IOR: case XOR:
2385 case LSHIFT: case ASHIFT: case ROTATE:
2386 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2387 case NE: case EQ:
2388 case GE: case GT: case GEU: case GTU:
2389 case LE: case LT: case LEU: case LTU:
2390 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2391 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2392 loc = &XEXP (x, 0);
2393 goto restart;
2394
2395 case MEM:
2396 /* Most cases of MEM that convert to valid addresses have already been
2397 handled by our scan of regno_reg_rtx. The only special handling we
2398 need here is to make a copy of the rtx to ensure it isn't being
2399 shared if we have to change it to a pseudo.
2400
2401 If the rtx is a simple reference to an address via a virtual register,
2402 it can potentially be shared. In such cases, first try to make it
2403 a valid address, which can also be shared. Otherwise, copy it and
2404 proceed normally.
2405
2406 First check for common cases that need no processing. These are
2407 usually due to instantiation already being done on a previous instance
2408 of a shared rtx. */
2409
2410 temp = XEXP (x, 0);
2411 if (CONSTANT_ADDRESS_P (temp)
2412 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2413 || temp == arg_pointer_rtx
2414 #endif
2415 || temp == frame_pointer_rtx)
2416 return 1;
2417
2418 if (GET_CODE (temp) == PLUS
2419 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2420 && (XEXP (temp, 0) == frame_pointer_rtx
2421 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2422 || XEXP (temp, 0) == arg_pointer_rtx
2423 #endif
2424 ))
2425 return 1;
2426
2427 if (temp == virtual_stack_vars_rtx
2428 || temp == virtual_incoming_args_rtx
2429 || (GET_CODE (temp) == PLUS
2430 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2431 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2432 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2433 {
2434 /* This MEM may be shared. If the substitution can be done without
2435 the need to generate new pseudos, we want to do it in place
2436 so all copies of the shared rtx benefit. The call below will
2437 only make substitutions if the resulting address is still
2438 valid.
2439
2440 Note that we cannot pass X as the object in the recursive call
2441 since the insn being processed may not allow all valid
2442 addresses. However, if we were not passed an object, we can
2443 only modify X without copying it if X will have a valid
2444 address.
2445
2446 ??? Also note that this can still lose if OBJECT is an insn that
2447 has fewer restrictions on an address than some other insn.
2448 In that case, we will modify the shared address. This case
2449 doesn't seem very likely, though. */
2450
2451 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2452 object ? object : x, 0))
2453 return 1;
2454
2455 /* Otherwise make a copy and process that copy. We copy the entire
2456 RTL expression since it might be a PLUS which could also be
2457 shared. */
2458 *loc = x = copy_rtx (x);
2459 }
2460
2461 /* Fall through to generic unary operation case. */
2462 case USE:
2463 case CLOBBER:
2464 case SUBREG:
2465 case STRICT_LOW_PART:
2466 case NEG: case NOT:
2467 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2468 case SIGN_EXTEND: case ZERO_EXTEND:
2469 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2470 case FLOAT: case FIX:
2471 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2472 case ABS:
2473 case SQRT:
2474 case FFS:
2475 /* These cases either have just one operand or we know that we need not
2476 check the rest of the operands. */
2477 loc = &XEXP (x, 0);
2478 goto restart;
2479
2480 case REG:
2481 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2482 in front of this insn and substitute the temporary. */
2483 if (x == virtual_incoming_args_rtx)
2484 new = arg_pointer_rtx, offset = in_arg_offset;
2485 else if (x == virtual_stack_vars_rtx)
2486 new = frame_pointer_rtx, offset = var_offset;
2487 else if (x == virtual_stack_dynamic_rtx)
2488 new = stack_pointer_rtx, offset = dynamic_offset;
2489 else if (x == virtual_outgoing_args_rtx)
2490 new = stack_pointer_rtx, offset = out_arg_offset;
2491
2492 if (new)
2493 {
2494 temp = plus_constant (new, offset);
2495 if (!validate_change (object, loc, temp, 0))
2496 {
2497 if (! extra_insns)
2498 return 0;
2499
2500 start_sequence ();
2501 temp = force_operand (temp, NULL_RTX);
2502 seq = get_insns ();
2503 end_sequence ();
2504
2505 emit_insns_before (seq, object);
2506 if (! validate_change (object, loc, temp, 0)
2507 && ! validate_replace_rtx (x, temp, object))
2508 abort ();
2509 }
2510 }
2511
2512 return 1;
2513 }
2514
2515 /* Scan all subexpressions. */
2516 fmt = GET_RTX_FORMAT (code);
2517 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2518 if (*fmt == 'e')
2519 {
2520 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2521 return 0;
2522 }
2523 else if (*fmt == 'E')
2524 for (j = 0; j < XVECLEN (x, i); j++)
2525 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2526 extra_insns))
2527 return 0;
2528
2529 return 1;
2530 }
2531 \f
2532 /* Optimization: assuming this function does not receive nonlocal gotos,
2533 delete the handlers for such, as well as the insns to establish
2534 and disestablish them. */
2535
2536 static void
2537 delete_handlers ()
2538 {
2539 rtx insn;
2540 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2541 {
2542 /* Delete the handler by turning off the flag that would
2543 prevent jump_optimize from deleting it.
2544 Also permit deletion of the nonlocal labels themselves
2545 if nothing local refers to them. */
2546 if (GET_CODE (insn) == CODE_LABEL)
2547 LABEL_PRESERVE_P (insn) = 0;
2548 if (GET_CODE (insn) == INSN
2549 && ((nonlocal_goto_handler_slot != 0
2550 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2551 || (nonlocal_goto_stack_level != 0
2552 && reg_mentioned_p (nonlocal_goto_stack_level,
2553 PATTERN (insn)))))
2554 delete_insn (insn);
2555 }
2556 }
2557
2558 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2559 of the current function. */
2560
2561 rtx
2562 nonlocal_label_rtx_list ()
2563 {
2564 tree t;
2565 rtx x = 0;
2566
2567 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2568 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2569
2570 return x;
2571 }
2572 \f
2573 /* Output a USE for any register use in RTL.
2574 This is used with -noreg to mark the extent of lifespan
2575 of any registers used in a user-visible variable's DECL_RTL. */
2576
2577 void
2578 use_variable (rtl)
2579 rtx rtl;
2580 {
2581 if (GET_CODE (rtl) == REG)
2582 /* This is a register variable. */
2583 emit_insn (gen_rtx (USE, VOIDmode, rtl));
2584 else if (GET_CODE (rtl) == MEM
2585 && GET_CODE (XEXP (rtl, 0)) == REG
2586 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2587 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2588 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2589 /* This is a variable-sized structure. */
2590 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2591 }
2592
2593 /* Like use_variable except that it outputs the USEs after INSN
2594 instead of at the end of the insn-chain. */
2595
2596 void
2597 use_variable_after (rtl, insn)
2598 rtx rtl, insn;
2599 {
2600 if (GET_CODE (rtl) == REG)
2601 /* This is a register variable. */
2602 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2603 else if (GET_CODE (rtl) == MEM
2604 && GET_CODE (XEXP (rtl, 0)) == REG
2605 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2606 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2607 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2608 /* This is a variable-sized structure. */
2609 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2610 }
2611 \f
2612 int
2613 max_parm_reg_num ()
2614 {
2615 return max_parm_reg;
2616 }
2617
2618 /* Return the first insn following those generated by `assign_parms'. */
2619
2620 rtx
2621 get_first_nonparm_insn ()
2622 {
2623 if (last_parm_insn)
2624 return NEXT_INSN (last_parm_insn);
2625 return get_insns ();
2626 }
2627
2628 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
2629 Crash if there is none. */
2630
2631 rtx
2632 get_first_block_beg ()
2633 {
2634 register rtx searcher;
2635 register rtx insn = get_first_nonparm_insn ();
2636
2637 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
2638 if (GET_CODE (searcher) == NOTE
2639 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
2640 return searcher;
2641
2642 abort (); /* Invalid call to this function. (See comments above.) */
2643 return NULL_RTX;
2644 }
2645
2646 /* Return 1 if EXP returns an aggregate value, for which an address
2647 must be passed to the function or returned by the function. */
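/* (Illustrative: under -fpcc-struct-return, a function declared
   `struct s f ();' returns its value in memory, so this returns 1 even
   for a one-word struct.)  */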
2648
2649 int
2650 aggregate_value_p (exp)
2651 tree exp;
2652 {
2653 int i, regno, nregs;
2654 rtx reg;
2655 if (RETURN_IN_MEMORY (TREE_TYPE (exp)))
2656 return 1;
2657 if (flag_pcc_struct_return
2658 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2659 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
2660 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE))
2661 return 1;
2662 /* Make sure we have suitable call-clobbered regs to return
2663 the value in; if not, we must return it in memory. */
2664 reg = hard_function_value (TREE_TYPE (exp), 0);
2665 regno = REGNO (reg);
2666 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp)));
2667 for (i = 0; i < nregs; i++)
2668 if (! call_used_regs[regno + i])
2669 return 1;
2670 return 0;
2671 }
2672 \f
2673 /* Assign RTL expressions to the function's parameters.
2674 This may involve copying them into registers and using
2675 those registers as the RTL for them.
2676
2677 If SECOND_TIME is non-zero it means that this function is being
2678 called a second time. This is done by integrate.c when a function's
2679 compilation is deferred. We need to come back here in case the
2680 FUNCTION_ARG macro computes items needed for the rest of the compilation
2681 (such as changing which registers are fixed or caller-saved). But suppress
2682 writing any insns or setting DECL_RTL of anything in this case. */
2683
2684 void
2685 assign_parms (fndecl, second_time)
2686 tree fndecl;
2687 int second_time;
2688 {
2689 register tree parm;
2690 register rtx entry_parm = 0;
2691 register rtx stack_parm = 0;
2692 CUMULATIVE_ARGS args_so_far;
2693 enum machine_mode promoted_mode, passed_mode, nominal_mode;
2694 int unsignedp;
2695 /* Total space needed so far for args on the stack,
2696 given as a constant and a tree-expression. */
2697 struct args_size stack_args_size;
2698 tree fntype = TREE_TYPE (fndecl);
2699 tree fnargs = DECL_ARGUMENTS (fndecl);
2700 /* This is used for the arg pointer when referring to stack args. */
2701 rtx internal_arg_pointer;
2702 /* This is a dummy PARM_DECL that we use for the function result if
2703 the function returns a structure. */
2704 tree function_result_decl = 0;
2705 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2706 int varargs_setup = 0;
2707 rtx conversion_insns = 0;
2708 /* FUNCTION_ARG may look at this variable. Since we are not
2709 expanding a call, it will always be zero in this function. */
2710 int current_call_is_indirect = 0;
2711
2712 /* Nonzero if the last arg is named `__builtin_va_alist',
2713 which is used on some machines for old-fashioned non-ANSI varargs.h;
2714 this should be stuck onto the stack as if it had arrived there. */
2715 int vararg
2716 = (fnargs
2717 && (parm = tree_last (fnargs)) != 0
2718 && DECL_NAME (parm)
2719 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2720 "__builtin_va_alist")));
2721
2722 /* Nonzero if function takes extra anonymous args.
2723 This means the last named arg must be on the stack
2724 right before the anonymous ones. */
2725 int stdarg
2726 = (TYPE_ARG_TYPES (fntype) != 0
2727 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2728 != void_type_node));
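  /* (Illustrative: for `int g (int n, ...)' the argument type list does
     not end in void_type_node, so STDARG is nonzero; for `int h (int n)'
     it does, and STDARG is zero.)  */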
2729
2730 /* If the reg that the virtual arg pointer will be translated into is
2731 not a fixed reg or is the stack pointer, make a copy of the virtual
2732 arg pointer, and address parms via the copy. The frame pointer is
2733 considered fixed even though it is not marked as such.
2734
2735 The second time through, simply use ap to avoid generating rtx. */
2736
2737 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2738 || ! (fixed_regs[ARG_POINTER_REGNUM]
2739 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2740 && ! second_time)
2741 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2742 else
2743 internal_arg_pointer = virtual_incoming_args_rtx;
2744 current_function_internal_arg_pointer = internal_arg_pointer;
2745
2746 stack_args_size.constant = 0;
2747 stack_args_size.var = 0;
2748
2749 /* If struct value address is treated as the first argument, make it so. */
2750 if (aggregate_value_p (DECL_RESULT (fndecl))
2751 && ! current_function_returns_pcc_struct
2752 && struct_value_incoming_rtx == 0)
2753 {
2754 tree type = build_pointer_type (fntype);
2755
2756 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2757
2758 DECL_ARG_TYPE (function_result_decl) = type;
2759 TREE_CHAIN (function_result_decl) = fnargs;
2760 fnargs = function_result_decl;
2761 }
2762
2763 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2764 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2765
2766 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2767 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
2768 #else
2769 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
2770 #endif
2771
2772 /* We haven't yet found an argument that we must push and pretend the
2773 caller did. */
2774 current_function_pretend_args_size = 0;
2775
2776 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2777 {
2778 int aggregate
2779 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2780 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2781 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE
2782 || TREE_CODE (TREE_TYPE (parm)) == QUAL_UNION_TYPE);
2783 struct args_size stack_offset;
2784 struct args_size arg_size;
2785 int passed_pointer = 0;
2786 tree passed_type = DECL_ARG_TYPE (parm);
2787
2788 /* Set LAST_NAMED if this is last named arg before some
2789 anonymous args. We treat it as if it were anonymous too. */
2790 int last_named = ((TREE_CHAIN (parm) == 0
2791 || DECL_NAME (TREE_CHAIN (parm)) == 0)
2792 && (vararg || stdarg));
2793
2794 if (TREE_TYPE (parm) == error_mark_node
2795 /* This can happen after weird syntax errors
2796 or if an enum type is defined among the parms. */
2797 || TREE_CODE (parm) != PARM_DECL
2798 || passed_type == NULL)
2799 {
2800 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
2801 const0_rtx);
2802 TREE_USED (parm) = 1;
2803 continue;
2804 }
2805
2806 /* For varargs.h function, save info about regs and stack space
2807 used by the individual args, not including the va_alist arg. */
2808 if (vararg && last_named)
2809 current_function_args_info = args_so_far;
2810
2811 /* Find mode of arg as it is passed, and mode of arg
2812 as it should be during execution of this function. */
2813 passed_mode = TYPE_MODE (passed_type);
2814 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
2815
2816 /* If the parm's mode is VOID, its value doesn't matter,
2817 so avoid the usual things like emit_move_insn that could crash. */
2818 if (nominal_mode == VOIDmode)
2819 {
2820 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
2821 continue;
2822 }
2823
2824 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2825 /* See if this arg was passed by invisible reference. */
2826 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
2827 passed_type, ! last_named))
2828 {
2829 passed_type = build_pointer_type (passed_type);
2830 passed_pointer = 1;
2831 passed_mode = nominal_mode = Pmode;
2832 }
2833 #endif
2834
2835 promoted_mode = passed_mode;
2836
2837 #ifdef PROMOTE_FUNCTION_ARGS
2838 /* Compute the mode to which the arg is actually extended. */
2839 if (TREE_CODE (passed_type) == INTEGER_TYPE
2840 || TREE_CODE (passed_type) == ENUMERAL_TYPE
2841 || TREE_CODE (passed_type) == BOOLEAN_TYPE
2842 || TREE_CODE (passed_type) == CHAR_TYPE
2843 || TREE_CODE (passed_type) == REAL_TYPE
2844 || TREE_CODE (passed_type) == POINTER_TYPE
2845 || TREE_CODE (passed_type) == OFFSET_TYPE)
2846 {
2847 unsignedp = TREE_UNSIGNED (passed_type);
2848 PROMOTE_MODE (promoted_mode, unsignedp, passed_type);
2849 }
2850 #endif
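      /* (Illustrative: on a target whose PROMOTE_MODE widens sub-word
         integers, a `short' parameter keeps passed_mode == HImode while
         promoted_mode becomes SImode; unsignedp records whether the
         extension is zero- or sign-extension.)  */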
2851
2852 /* Let machine desc say which reg (if any) the parm arrives in.
2853 0 means it arrives on the stack. */
2854 #ifdef FUNCTION_INCOMING_ARG
2855 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
2856 passed_type, ! last_named);
2857 #else
2858 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
2859 passed_type, ! last_named);
2860 #endif
2861
2862 if (entry_parm)
2863 passed_mode = promoted_mode;
2864
2865 #ifdef SETUP_INCOMING_VARARGS
2866 /* If this is the last named parameter, do any required setup for
2867 varargs or stdargs. We need to know about the case of this being an
2868 addressable type, in which case we skip the registers it
2869 would have arrived in.
2870
2871 For stdargs, LAST_NAMED will be set for two parameters, the one that
2872 is actually the last named, and the dummy parameter. We only
2873 want to do this action once.
2874
2875 Also, indicate when RTL generation is to be suppressed. */
2876 if (last_named && !varargs_setup)
2877 {
2878 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
2879 current_function_pretend_args_size,
2880 second_time);
2881 varargs_setup = 1;
2882 }
2883 #endif
2884
2885 /* Determine parm's home in the stack,
2886 in case it arrives in the stack or we should pretend it did.
2887
2888 Compute the stack position and rtx where the argument arrives
2889 and its size.
2890
2891 There is one complexity here: If this was a parameter that would
2892 have been passed in registers, but wasn't only because it is
2893 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2894 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2895 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
2896 0 as it was the previous time. */
2897
2898 locate_and_pad_parm (passed_mode, passed_type,
2899 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2900 1,
2901 #else
2902 #ifdef FUNCTION_INCOMING_ARG
2903 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
2904 passed_type,
2905 (! last_named
2906 || varargs_setup)) != 0,
2907 #else
2908 FUNCTION_ARG (args_so_far, passed_mode,
2909 passed_type,
2910 ! last_named || varargs_setup) != 0,
2911 #endif
2912 #endif
2913 fndecl, &stack_args_size, &stack_offset, &arg_size);
2914
2915 if (! second_time)
2916 {
2917 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
2918
2919 if (offset_rtx == const0_rtx)
2920 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
2921 else
2922 stack_parm = gen_rtx (MEM, passed_mode,
2923 gen_rtx (PLUS, Pmode,
2924 internal_arg_pointer, offset_rtx));
2925
2926 /* If this is a memory ref that contains aggregate components,
2927 mark it as such for cse and loop optimize. */
2928 MEM_IN_STRUCT_P (stack_parm) = aggregate;
2929 }
2930
2931 /* If this parameter was passed both in registers and in the stack,
2932 use the copy on the stack. */
2933 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
2934 entry_parm = 0;
2935
2936 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2937 /* If this parm was passed part in regs and part in memory,
2938 pretend it arrived entirely in memory
2939 by pushing the register-part onto the stack.
2940
2941 In the special case of a DImode or DFmode that is split,
2942 we could put it together in a pseudoreg directly,
2943 but for now that's not worth bothering with. */
2944
2945 if (entry_parm)
2946 {
2947 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
2948 passed_type, ! last_named);
2949
2950 if (nregs > 0)
2951 {
2952 current_function_pretend_args_size
2953 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
2954 / (PARM_BOUNDARY / BITS_PER_UNIT)
2955 * (PARM_BOUNDARY / BITS_PER_UNIT));
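              /* (Illustrative: with 4-byte words and PARM_BOUNDARY == 64,
                 nregs == 3 gives 12 bytes, rounded up to 16 by the
                 computation above.)  */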
2956
2957 if (! second_time)
2958 move_block_from_reg (REGNO (entry_parm),
2959 validize_mem (stack_parm), nregs);
2960 entry_parm = stack_parm;
2961 }
2962 }
2963 #endif
2964
2965 /* If we didn't decide this parm came in a register,
2966 by default it came on the stack. */
2967 if (entry_parm == 0)
2968 entry_parm = stack_parm;
2969
2970 /* Record permanently how this parm was passed. */
2971 if (! second_time)
2972 DECL_INCOMING_RTL (parm) = entry_parm;
2973
2974 /* If there is actually space on the stack for this parm,
2975 count it in stack_args_size; otherwise set stack_parm to 0
2976 to indicate there is no preallocated stack slot for the parm. */
2977
2978 if (entry_parm == stack_parm
2979 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
2980 /* On some machines, even if a parm value arrives in a register
2981 there is still an (uninitialized) stack slot allocated for it.
2982
2983 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
2984 whether this parameter already has a stack slot allocated,
2985 because an arg block exists only if current_function_args_size
2986 is larger than some threshold, and we haven't calculated that
2987 yet. So, for now, we just assume that stack slots never exist
2988 in this case. */
2989 || REG_PARM_STACK_SPACE (fndecl) > 0
2990 #endif
2991 )
2992 {
2993 stack_args_size.constant += arg_size.constant;
2994 if (arg_size.var)
2995 ADD_PARM_SIZE (stack_args_size, arg_size.var);
2996 }
2997 else
2998 /* No stack slot was pushed for this parm. */
2999 stack_parm = 0;
3000
3001 /* Update info on where next arg arrives in registers. */
3002
3003 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
3004 passed_type, ! last_named);
3005
3006 /* If this is our second time through, we are done with this parm. */
3007 if (second_time)
3008 continue;
3009
3010 /* If we can't trust the parm stack slot to be aligned enough
3011 for its ultimate type, don't use that slot after entry.
3012 We'll make another stack slot, if we need one. */
3013 {
3014 int thisparm_boundary
3015 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
3016
3017 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3018 stack_parm = 0;
3019 }
3020
3021 /* Now adjust STACK_PARM to the mode and precise location
3022 where this parameter should live during execution,
3023 if we discover that it must live in the stack during execution.
3024 To make debuggers happier on big-endian machines, we store
3025 the value in the last bytes of the space available. */
3026
3027 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3028 && stack_parm != 0)
3029 {
3030 rtx offset_rtx;
3031
3032 #if BYTES_BIG_ENDIAN
3033 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3034 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3035 - GET_MODE_SIZE (nominal_mode));
3036 #endif
3037
3038 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3039 if (offset_rtx == const0_rtx)
3040 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3041 else
3042 stack_parm = gen_rtx (MEM, nominal_mode,
3043 gen_rtx (PLUS, Pmode,
3044 internal_arg_pointer, offset_rtx));
3045
3046 /* If this is a memory ref that contains aggregate components,
3047 mark it as such for cse and loop optimize. */
3048 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3049 }
3050
3051 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3052 in the mode in which it arrives.
3053 STACK_PARM is an RTX for a stack slot where the parameter can live
3054 during the function (in case we want to put it there).
3055 STACK_PARM is 0 if no stack slot was pushed for it.
3056
3057 Now output code if necessary to convert ENTRY_PARM to
3058 the type in which this function declares it,
3059 and store that result in an appropriate place,
3060 which may be a pseudo reg, may be STACK_PARM,
3061 or may be a local stack slot if STACK_PARM is 0.
3062
3063 Set DECL_RTL to that place. */
3064
3065 if (nominal_mode == BLKmode)
3066 {
3067 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3068 if (GET_CODE (entry_parm) == REG)
3069 {
3070 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3071 UNITS_PER_WORD);
3072
3073 /* Note that we will be storing an integral number of words.
3074 So we have to be careful to ensure that we allocate an
3075 integral number of words. We do this below in the
3076 assign_stack_local if space was not allocated in the argument
3077 list. If it was, this will not work if PARM_BOUNDARY is not
3078 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3079 if it becomes a problem. */
3080
3081 if (stack_parm == 0)
3082 {
3083 stack_parm
3084 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
3085 /* If this is a memory ref that contains aggregate components,
3086 mark it as such for cse and loop optimize. */
3087 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3088 }
3089
3090 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3091 abort ();
3092
3093 move_block_from_reg (REGNO (entry_parm),
3094 validize_mem (stack_parm),
3095 size_stored / UNITS_PER_WORD);
3096 }
3097 DECL_RTL (parm) = stack_parm;
3098 }
3099 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3100 && ! DECL_INLINE (fndecl))
3101 /* layout_decl may set this. */
3102 || TREE_ADDRESSABLE (parm)
3103 || TREE_SIDE_EFFECTS (parm)
3104 /* If -ffloat-store specified, don't put explicit
3105 float variables into registers. */
3106 || (flag_float_store
3107 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3108 /* Always assign pseudo to structure return or item passed
3109 by invisible reference. */
3110 || passed_pointer || parm == function_result_decl)
3111 {
3112 /* Store the parm in a pseudoregister during the function, but we
3113 may need to do it in a wider mode. */
3114
3115 register rtx parmreg;
3116
3117 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3118 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
3119 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
3120 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
3121 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
3122 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
3123 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
3124 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
3125 {
3126 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
3127 }
3128
3129 parmreg = gen_reg_rtx (nominal_mode);
3130 REG_USERVAR_P (parmreg) = 1;
3131
3132 /* If this was an item that we received a pointer to, set DECL_RTL
3133 appropriately. */
3134 if (passed_pointer)
3135 {
3136 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3137 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3138 }
3139 else
3140 DECL_RTL (parm) = parmreg;
3141
3142 /* Copy the value into the register. */
3143 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
3144 {
3145 /* If ENTRY_PARM is a hard register, it might be in a register
3146 not valid for operating in its mode (e.g., an odd-numbered
3147 register for a DFmode). In that case, moves are the only
3148 thing valid, so we can't do a convert from there. This
3149 occurs when the calling sequence allows such misaligned
3150 usages.
3151
3152 In addition, the conversion may involve a call, which could
3153 clobber parameters which haven't been copied to pseudo
3154 registers yet. Therefore, we must first copy the parm to
3155 a pseudo reg here, and save the conversion until after all
3156 parameters have been moved. */
3157
3158 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3159
3160 emit_move_insn (tempreg, validize_mem (entry_parm));
3161
3162 push_to_sequence (conversion_insns);
3163 convert_move (parmreg, tempreg, unsignedp);
3164 conversion_insns = get_insns ();
3165 end_sequence ();
3166 }
3167 else
3168 emit_move_insn (parmreg, validize_mem (entry_parm));
3169
3170 /* If we were passed a pointer but the actual value
3171 can safely live in a register, put it in one. */
3172 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3173 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3174 && ! DECL_INLINE (fndecl))
3175 /* layout_decl may set this. */
3176 || TREE_ADDRESSABLE (parm)
3177 || TREE_SIDE_EFFECTS (parm)
3178 /* If -ffloat-store specified, don't put explicit
3179 float variables into registers. */
3180 || (flag_float_store
3181 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3182 {
3183 /* We can't use nominal_mode, because it will have been set to
3184 Pmode above. We must use the actual mode of the parm. */
3185 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3186 emit_move_insn (parmreg, DECL_RTL (parm));
3187 DECL_RTL (parm) = parmreg;
3188 }
3189 #ifdef FUNCTION_ARG_CALLEE_COPIES
3190 /* If we are passed an arg by reference and it is our responsibility
3191 to make a copy, do it now.
3192 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3193 original argument, so we must recreate them in the call to
3194 FUNCTION_ARG_CALLEE_COPIES. */
3195 /* ??? Later add code to handle the case where the argument isn't
3196 modified, so that the copy can be omitted. */
3197
3198 else if (passed_pointer
3199 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3200 TYPE_MODE (DECL_ARG_TYPE (parm)),
3201 DECL_ARG_TYPE (parm),
3202 ! last_named))
3203 {
3204 rtx copy;
3205 tree type = DECL_ARG_TYPE (parm);
3206
3207 /* This sequence may involve a library call perhaps clobbering
3208 registers that haven't been copied to pseudos yet. */
3209
3210 push_to_sequence (conversion_insns);
3211
3212 if (TYPE_SIZE (type) == 0
3213 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3214 {
3215 /* This is a variable sized object. */
3216 /* ??? Can we use expr_size here? */
3217 rtx size_rtx = expand_expr (size_in_bytes (type), NULL_RTX,
3218 TYPE_MODE (sizetype), 0);
3219
3220 copy = gen_rtx (MEM, BLKmode,
3221 allocate_dynamic_stack_space (size_rtx, NULL_RTX,
3222 TYPE_ALIGN (type)));
3223 }
3224 else
3225 {
3226 int size = int_size_in_bytes (type);
3227 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
3228 }
3229
3230 store_expr (parm, copy, 0);
3231 emit_move_insn (parmreg, XEXP (copy, 0));
3232 conversion_insns = get_insns ();
3233 end_sequence ();
3234 }
3235 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3236
3237 /* In any case, record the parm's desired stack location
3238 in case we later discover it must live in the stack. */
3239 if (REGNO (parmreg) >= nparmregs)
3240 {
3241 rtx *new;
3242 nparmregs = REGNO (parmreg) + 5;
3243 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3244 bcopy (parm_reg_stack_loc, new, nparmregs * sizeof (rtx));
3245 parm_reg_stack_loc = new;
3246 }
3247 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3248
3249 /* Mark the register as eliminable if we did no conversion
3250 and it was copied from memory at a fixed offset,
3251 and the arg pointer was not copied to a pseudo-reg.
3252 If the arg pointer is a pseudo reg or the offset formed
3253 an invalid address, such memory-equivalences
3254 as we make here would screw up life analysis for it. */
3255 if (nominal_mode == passed_mode
3256 && GET_CODE (entry_parm) == MEM
3257 && entry_parm == stack_parm
3258 && stack_offset.var == 0
3259 && reg_mentioned_p (virtual_incoming_args_rtx,
3260 XEXP (entry_parm, 0)))
3261 REG_NOTES (get_last_insn ())
3262 = gen_rtx (EXPR_LIST, REG_EQUIV,
3263 entry_parm, REG_NOTES (get_last_insn ()));
3264
3265 /* For pointer data type, suggest pointer register. */
3266 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3267 mark_reg_pointer (parmreg);
3268 }
3269 else
3270 {
3271 /* Value must be stored in the stack slot STACK_PARM
3272 during function execution. */
3273
3274 if (passed_mode != nominal_mode)
3275 {
3276 /* Conversion is required. */
3277 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3278
3279 emit_move_insn (tempreg, validize_mem (entry_parm));
3280
3281 push_to_sequence (conversion_insns);
3282 entry_parm = convert_to_mode (nominal_mode, tempreg,
3283 TREE_UNSIGNED (TREE_TYPE (parm)));
3284 conversion_insns = get_insns ();
3285 end_sequence ();
3286 }
3287
3288 if (entry_parm != stack_parm)
3289 {
3290 if (stack_parm == 0)
3291 {
3292 stack_parm
3293 = assign_stack_local (GET_MODE (entry_parm),
3294 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3295 /* If this is a memory ref that contains aggregate components,
3296 mark it as such for cse and loop optimize. */
3297 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3298 }
3299
3300 if (passed_mode != nominal_mode)
3301 {
3302 push_to_sequence (conversion_insns);
3303 emit_move_insn (validize_mem (stack_parm),
3304 validize_mem (entry_parm));
3305 conversion_insns = get_insns ();
3306 end_sequence ();
3307 }
3308 else
3309 emit_move_insn (validize_mem (stack_parm),
3310 validize_mem (entry_parm));
3311 }
3312
3313 DECL_RTL (parm) = stack_parm;
3314 }
3315
3316 /* If this "parameter" was the place where we are receiving the
3317 function's incoming structure pointer, set up the result. */
3318 if (parm == function_result_decl)
3319 DECL_RTL (DECL_RESULT (fndecl))
3320 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3321
3322 if (TREE_THIS_VOLATILE (parm))
3323 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3324 if (TREE_READONLY (parm))
3325 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3326 }
3327
3328 /* Output all parameter conversion instructions (possibly including calls)
3329 now that all parameters have been copied out of hard registers. */
3330 emit_insns (conversion_insns);
3331
3332 max_parm_reg = max_reg_num ();
3333 last_parm_insn = get_last_insn ();
3334
3335 current_function_args_size = stack_args_size.constant;
3336
3337 /* Adjust function incoming argument size for alignment and
3338 minimum length. */
3339
3340 #ifdef REG_PARM_STACK_SPACE
3341 #ifndef MAYBE_REG_PARM_STACK_SPACE
3342 current_function_args_size = MAX (current_function_args_size,
3343 REG_PARM_STACK_SPACE (fndecl));
3344 #endif
3345 #endif
3346
3347 #ifdef STACK_BOUNDARY
3348 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3349
3350 current_function_args_size
3351 = ((current_function_args_size + STACK_BYTES - 1)
3352 / STACK_BYTES) * STACK_BYTES;
3353 #endif
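  /* (Illustrative: with STACK_BOUNDARY == 64, STACK_BYTES is 8, so an
     args size of 20 bytes is rounded up to 24 by the computation
     above.)  */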
3354
3355 #ifdef ARGS_GROW_DOWNWARD
3356 current_function_arg_offset_rtx
3357 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3358 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3359 size_int (-stack_args_size.constant)),
3360 NULL_RTX, VOIDmode, 0));
3361 #else
3362 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3363 #endif
3364
3365 /* See how many bytes, if any, of its args a function should try to pop
3366 on return. */
3367
3368 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3369 current_function_args_size);
3370
3371 /* For a stdarg.h function, save info about regs and stack space
3372 used by the named args. */
3373
3374 if (stdarg)
3375 current_function_args_info = args_so_far;
3376
3377 /* Set the rtx used for the function return value. Put this in its
3378 own variable so any optimizers that need this information don't have
3379 to include tree.h. Do this here so it gets done when an inlined
3380 function gets output. */
3381
3382 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3383 }
3384 \f
3385 /* Indicate whether REGNO is an incoming argument to the current function
3386 that was promoted to a wider mode. If so, return the RTX for the
3387 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3388 that REGNO is promoted from and whether the promotion was signed or
3389 unsigned. */
3390
3391 #ifdef PROMOTE_FUNCTION_ARGS
3392
3393 rtx
3394 promoted_input_arg (regno, pmode, punsignedp)
3395 int regno;
3396 enum machine_mode *pmode;
3397 int *punsignedp;
3398 {
3399 tree arg;
3400
3401 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3402 arg = TREE_CHAIN (arg))
3403 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
3404 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3405 && (TREE_CODE (TREE_TYPE (arg)) == INTEGER_TYPE
3406 || TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE
3407 || TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE
3408 || TREE_CODE (TREE_TYPE (arg)) == CHAR_TYPE
3409 || TREE_CODE (TREE_TYPE (arg)) == REAL_TYPE
3410 || TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE
3411 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE))
3412 {
3413 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3414 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
3415
3416 PROMOTE_MODE (mode, unsignedp, TREE_TYPE (arg));
3417 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3418 && mode != DECL_MODE (arg))
3419 {
3420 *pmode = DECL_MODE (arg);
3421 *punsignedp = unsignedp;
3422 return DECL_INCOMING_RTL (arg);
3423 }
3424 }
3425
3426 return 0;
3427 }
3428
3429 #endif
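/* As a hypothetical illustration: on a target whose PROMOTE_MODE
   widens HImode integers to SImode, a `short' argument arrives in an
   SImode register; promoted_input_arg returns that register and sets
   *PMODE to HImode, the mode the value was promoted from.  */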
3430 \f
3431 /* Compute the size and offset from the start of the stacked arguments for a
3432 parm passed in mode PASSED_MODE and with type TYPE.
3433
3434 INITIAL_OFFSET_PTR points to the current offset into the stacked
3435 arguments.
3436
3437 The starting offset and size for this parm are returned in *OFFSET_PTR
3438 and *ARG_SIZE_PTR, respectively.
3439
3440 IN_REGS is non-zero if the argument will be passed in registers. It will
3441 never be set if REG_PARM_STACK_SPACE is not defined.
3442
3443 FNDECL is the function in which the argument was defined.
3444
3445 There are two types of rounding that are done. The first, controlled by
3446 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3447 list to be aligned to the specific boundary (in bits). This rounding
3448 affects the initial and starting offsets, but not the argument size.
3449
3450 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3451 optionally rounds the size of the parm to PARM_BOUNDARY. The
3452 initial offset is not affected by this rounding, while the size always
3453 is and the starting offset may be. */
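/* As a worked example (hypothetical values, not from any particular
   target): with PARM_BOUNDARY == 32, FUNCTION_ARG_BOUNDARY returning
   32 for an HImode parm, and FUNCTION_ARG_PADDING saying to pad
   upward, a 2-byte argument at initial offset 5 first has its
   starting offset rounded up to 8 (the first rounding), then has its
   size rounded up from 2 to 4 bytes (the second rounding), so the
   next parm starts at offset 12.  */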
3454
3455 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
3456 initial_offset_ptr is positive because locate_and_pad_parm's
3457 callers pass in the total size of args so far as
3458 initial_offset_ptr. arg_size_ptr is always positive. */
3459
3460 static void pad_to_arg_alignment (), pad_below ();
3461
3462 void
3463 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3464 initial_offset_ptr, offset_ptr, arg_size_ptr)
3465 enum machine_mode passed_mode;
3466 tree type;
3467 int in_regs;
3468 tree fndecl;
3469 struct args_size *initial_offset_ptr;
3470 struct args_size *offset_ptr;
3471 struct args_size *arg_size_ptr;
3472 {
3473 tree sizetree
3474 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3475 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3476 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3477 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3478 int reg_parm_stack_space = 0;
3479
3480 #ifdef REG_PARM_STACK_SPACE
3481 /* If we have found a stack parm before we reach the end of the
3482 area reserved for registers, skip that area. */
3483 if (! in_regs)
3484 {
3485 #ifdef MAYBE_REG_PARM_STACK_SPACE
3486 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3487 #else
3488 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3489 #endif
3490 if (reg_parm_stack_space > 0)
3491 {
3492 if (initial_offset_ptr->var)
3493 {
3494 initial_offset_ptr->var
3495 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3496 size_int (reg_parm_stack_space));
3497 initial_offset_ptr->constant = 0;
3498 }
3499 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3500 initial_offset_ptr->constant = reg_parm_stack_space;
3501 }
3502 }
3503 #endif /* REG_PARM_STACK_SPACE */
3504
3505 arg_size_ptr->var = 0;
3506 arg_size_ptr->constant = 0;
3507
3508 #ifdef ARGS_GROW_DOWNWARD
3509 if (initial_offset_ptr->var)
3510 {
3511 offset_ptr->constant = 0;
3512 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3513 initial_offset_ptr->var);
3514 }
3515 else
3516 {
3517 offset_ptr->constant = - initial_offset_ptr->constant;
3518 offset_ptr->var = 0;
3519 }
3520 if (where_pad == upward
3521 && (TREE_CODE (sizetree) != INTEGER_CST
3522 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3523 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3524 SUB_PARM_SIZE (*offset_ptr, sizetree);
3525 if (where_pad != downward)
3526 pad_to_arg_alignment (offset_ptr, boundary);
3527 if (initial_offset_ptr->var)
3528 {
3529 arg_size_ptr->var = size_binop (MINUS_EXPR,
3530 size_binop (MINUS_EXPR,
3531 integer_zero_node,
3532 initial_offset_ptr->var),
3533 offset_ptr->var);
3534 }
3535 else
3536 {
3537 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3538 offset_ptr->constant);
3539 }
3540 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3541 if (where_pad == downward)
3542 pad_below (arg_size_ptr, passed_mode, sizetree);
3543 #else /* !ARGS_GROW_DOWNWARD */
3544 pad_to_arg_alignment (initial_offset_ptr, boundary);
3545 *offset_ptr = *initial_offset_ptr;
3546 if (where_pad == downward)
3547 pad_below (offset_ptr, passed_mode, sizetree);
3548
3549 #ifdef PUSH_ROUNDING
3550 if (passed_mode != BLKmode)
3551 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3552 #endif
3553
3554 if (where_pad != none
3555 && (TREE_CODE (sizetree) != INTEGER_CST
3556 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3557 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3558
3559 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3560 #endif /* ARGS_GROW_DOWNWARD */
3561 }
3562
3563 /* Round the stack offset in *OFFSET_PTR up (or down, when args grow downward)
3564 to a multiple of BOUNDARY. BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3565
3566 static void
3567 pad_to_arg_alignment (offset_ptr, boundary)
3568 struct args_size *offset_ptr;
3569 int boundary;
3570 {
3571 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3572
3573 if (boundary > BITS_PER_UNIT)
3574 {
3575 if (offset_ptr->var)
3576 {
3577 offset_ptr->var =
3578 #ifdef ARGS_GROW_DOWNWARD
3579 round_down
3580 #else
3581 round_up
3582 #endif
3583 (ARGS_SIZE_TREE (*offset_ptr),
3584 boundary / BITS_PER_UNIT);
3585 offset_ptr->constant = 0; /*?*/
3586 }
3587 else
3588 offset_ptr->constant =
3589 #ifdef ARGS_GROW_DOWNWARD
3590 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3591 #else
3592 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
3593 #endif
3594 }
3595 }
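/* A minimal worked example, assuming BITS_PER_UNIT == 8: for a
   32-bit boundary, boundary_in_bytes is 4, so a constant offset of 5
   becomes CEIL_ROUND (5, 4) == ((5 + 3) & ~3) == 8.  When args grow
   downward the offsets are negative, and FLOOR_ROUND (-5, 4)
   == (-5 & ~3) == -8; the masking works on negative two's-complement
   values, where division would have truncated toward zero.  */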
3596
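/* Add to *OFFSET_PTR the padding that goes below a parm of mode
   PASSED_MODE and size SIZETREE, i.e. the difference between the
   size rounded up to a multiple of PARM_BOUNDARY and the size
   itself.  */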
3597 static void
3598 pad_below (offset_ptr, passed_mode, sizetree)
3599 struct args_size *offset_ptr;
3600 enum machine_mode passed_mode;
3601 tree sizetree;
3602 {
3603 if (passed_mode != BLKmode)
3604 {
3605 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3606 offset_ptr->constant
3607 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3608 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3609 - GET_MODE_SIZE (passed_mode));
3610 }
3611 else
3612 {
3613 if (TREE_CODE (sizetree) != INTEGER_CST
3614 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3615 {
3616 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3617 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3618 /* Add it in. */
3619 ADD_PARM_SIZE (*offset_ptr, s2);
3620 SUB_PARM_SIZE (*offset_ptr, sizetree);
3621 }
3622 }
3623 }
3624
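/* Round the tree VALUE down to a multiple of DIVISOR.  */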
3625 static tree
3626 round_down (value, divisor)
3627 tree value;
3628 int divisor;
3629 {
3630 return size_binop (MULT_EXPR,
3631 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3632 size_int (divisor));
3633 }
3634 \f
3635 /* Walk the tree of blocks describing the binding levels within a function
3636 and warn about uninitialized variables.
3637 This is done after calling flow_analysis and before global_alloc
3638 clobbers the pseudo-regs to hard regs. */
3639
3640 void
3641 uninitialized_vars_warning (block)
3642 tree block;
3643 {
3644 register tree decl, sub;
3645 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3646 {
3647 if (TREE_CODE (decl) == VAR_DECL
3648 /* These warnings are unreliable for aggregates
3649 because assigning the fields one by one can fail to convince
3650 flow.c that the entire aggregate was initialized.
3651 Unions are troublesome because members may be shorter. */
3652 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3653 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3654 && TREE_CODE (TREE_TYPE (decl)) != QUAL_UNION_TYPE
3655 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3656 && DECL_RTL (decl) != 0
3657 && GET_CODE (DECL_RTL (decl)) == REG
3658 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3659 warning_with_decl (decl,
3660 "`%s' may be used uninitialized in this function");
3661 if (TREE_CODE (decl) == VAR_DECL
3662 && DECL_RTL (decl) != 0
3663 && GET_CODE (DECL_RTL (decl)) == REG
3664 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3665 warning_with_decl (decl,
3666 "variable `%s' may be clobbered by `longjmp' or `vfork'");
3667 }
3668 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3669 uninitialized_vars_warning (sub);
3670 }
3671
3672 /* Do the appropriate part of uninitialized_vars_warning
3673 but for arguments instead of local variables. */
3674
3675 void
3676 setjmp_args_warning (block)
3677 tree block;
3678 {
3679 register tree decl;
3680 for (decl = DECL_ARGUMENTS (current_function_decl);
3681 decl; decl = TREE_CHAIN (decl))
3682 if (DECL_RTL (decl) != 0
3683 && GET_CODE (DECL_RTL (decl)) == REG
3684 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3685 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp' or `vfork'");
3686 }
3687
3688 /* If this function calls setjmp, put all vars into the stack
3689 unless they were declared `register'. */
3690
3691 void
3692 setjmp_protect (block)
3693 tree block;
3694 {
3695 register tree decl, sub;
3696 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3697 if ((TREE_CODE (decl) == VAR_DECL
3698 || TREE_CODE (decl) == PARM_DECL)
3699 && DECL_RTL (decl) != 0
3700 && GET_CODE (DECL_RTL (decl)) == REG
3701 /* If this variable came from an inline function, it must be
3702 that its life doesn't overlap the setjmp. If there was a
3703 setjmp in the function, it would already be in memory. We
3704 must exclude such variables because their DECL_RTL might be
3705 set to strange things such as virtual_stack_vars_rtx. */
3706 && ! DECL_FROM_INLINE (decl)
3707 && (
3708 #ifdef NON_SAVING_SETJMP
3709 /* If longjmp doesn't restore the registers,
3710 don't put anything in them. */
3711 NON_SAVING_SETJMP
3712 ||
3713 #endif
3714 ! DECL_REGISTER (decl)))
3715 put_var_into_stack (decl);
3716 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3717 setjmp_protect (sub);
3718 }
3719 \f
3720 /* Like the previous function, but for args instead of local variables. */
3721
3722 void
3723 setjmp_protect_args ()
3724 {
3725 register tree decl, sub;
3726 for (decl = DECL_ARGUMENTS (current_function_decl);
3727 decl; decl = TREE_CHAIN (decl))
3728 if ((TREE_CODE (decl) == VAR_DECL
3729 || TREE_CODE (decl) == PARM_DECL)
3730 && DECL_RTL (decl) != 0
3731 && GET_CODE (DECL_RTL (decl)) == REG
3732 && (
3733 /* If longjmp doesn't restore the registers,
3734 don't put anything in them. */
3735 #ifdef NON_SAVING_SETJMP
3736 NON_SAVING_SETJMP
3737 ||
3738 #endif
3739 ! DECL_REGISTER (decl)))
3740 put_var_into_stack (decl);
3741 }
3742 \f
3743 /* Return the context-pointer register corresponding to DECL,
3744 or 0 if it does not need one. */
3745
3746 rtx
3747 lookup_static_chain (decl)
3748 tree decl;
3749 {
3750 tree context = decl_function_context (decl);
3751 tree link;
3752
3753 if (context == 0)
3754 return 0;
3755
3756 /* We treat inline_function_decl as an alias for the current function
3757 because that is the inline function whose vars, types, etc.
3758 are being merged into the current function.
3759 See expand_inline_function. */
3760 if (context == current_function_decl || context == inline_function_decl)
3761 return virtual_stack_vars_rtx;
3762
3763 for (link = context_display; link; link = TREE_CHAIN (link))
3764 if (TREE_PURPOSE (link) == context)
3765 return RTL_EXPR_RTL (TREE_VALUE (link));
3766
3767 abort ();
3768 }
3769 \f
3770 /* Convert a stack slot address ADDR for variable VAR
3771 (from a containing function)
3772 into an address valid in this function (using a static chain). */
3773
3774 rtx
3775 fix_lexical_addr (addr, var)
3776 rtx addr;
3777 tree var;
3778 {
3779 rtx basereg;
3780 int displacement;
3781 tree context = decl_function_context (var);
3782 struct function *fp;
3783 rtx base = 0;
3784
3785 /* If this is the present function, we need not do anything. */
3786 if (context == current_function_decl || context == inline_function_decl)
3787 return addr;
3788
3789 for (fp = outer_function_chain; fp; fp = fp->next)
3790 if (fp->decl == context)
3791 break;
3792
3793 if (fp == 0)
3794 abort ();
3795
3796 /* Decode given address as base reg plus displacement. */
3797 if (GET_CODE (addr) == REG)
3798 basereg = addr, displacement = 0;
3799 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3800 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
3801 else
3802 abort ();
3803
3804 /* We accept vars reached via the containing function's
3805 incoming arg pointer and via its stack variables pointer. */
3806 if (basereg == fp->internal_arg_pointer)
3807 {
3808 /* If reached via arg pointer, get the arg pointer value
3809 out of that function's stack frame.
3810
3811 There are two cases: If a separate ap is needed, allocate a
3812 slot in the outer function for it and dereference it that way.
3813 This is correct even if the real ap is actually a pseudo.
3814 Otherwise, just adjust the offset from the frame pointer to
3815 compensate. */
3816
3817 #ifdef NEED_SEPARATE_AP
3818 rtx addr;
3819
3820 if (fp->arg_pointer_save_area == 0)
3821 fp->arg_pointer_save_area
3822 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
3823
3824 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
3825 addr = memory_address (Pmode, addr);
3826
3827 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
3828 #else
3829 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
3830 base = lookup_static_chain (var);
3831 #endif
3832 }
3833
3834 else if (basereg == virtual_stack_vars_rtx)
3835 {
3836 /* This is the same code as lookup_static_chain, duplicated here to
3837 avoid an extra call to decl_function_context. */
3838 tree link;
3839
3840 for (link = context_display; link; link = TREE_CHAIN (link))
3841 if (TREE_PURPOSE (link) == context)
3842 {
3843 base = RTL_EXPR_RTL (TREE_VALUE (link));
3844 break;
3845 }
3846 }
3847
3848 if (base == 0)
3849 abort ();
3850
3851 /* Use same offset, relative to appropriate static chain or argument
3852 pointer. */
3853 return plus_constant (base, displacement);
3854 }
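/* So a stack-variable address such as (plus virtual_stack_vars_rtx
   -8) in the containing function becomes (plus BASE -8) here, where
   BASE holds the static chain value for that context.  */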
3855 \f
3856 /* Return the address of the trampoline for entering nested fn FUNCTION.
3857 If necessary, allocate a trampoline (in the stack frame)
3858 and emit rtl to initialize its contents (at entry to this function). */
3859
3860 rtx
3861 trampoline_address (function)
3862 tree function;
3863 {
3864 tree link;
3865 tree rtlexp;
3866 rtx tramp;
3867 struct function *fp;
3868 tree fn_context;
3869
3870 /* Find an existing trampoline and return it. */
3871 for (link = trampoline_list; link; link = TREE_CHAIN (link))
3872 if (TREE_PURPOSE (link) == function)
3873 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
3874 for (fp = outer_function_chain; fp; fp = fp->next)
3875 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
3876 if (TREE_PURPOSE (link) == function)
3877 {
3878 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
3879 function);
3880 return round_trampoline_addr (tramp);
3881 }
3882
3883 /* None exists; we must make one. */
3884
3885 /* Find the `struct function' for the function containing FUNCTION. */
3886 fp = 0;
3887 fn_context = decl_function_context (function);
3888 if (fn_context != current_function_decl)
3889 for (fp = outer_function_chain; fp; fp = fp->next)
3890 if (fp->decl == fn_context)
3891 break;
3892
3893 /* Allocate run-time space for this trampoline
3894 (usually in the defining function's stack frame). */
3895 #ifdef ALLOCATE_TRAMPOLINE
3896 tramp = ALLOCATE_TRAMPOLINE (fp);
3897 #else
3898 /* If rounding needed, allocate extra space
3899 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
3900 #ifdef TRAMPOLINE_ALIGNMENT
3901 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
3902 #else
3903 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
3904 #endif
3905 if (fp != 0)
3906 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
3907 else
3908 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
3909 #endif
3910
3911 /* Record the trampoline for reuse and note it for later initialization
3912 by expand_function_end. */
3913 if (fp != 0)
3914 {
3915 push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack);
3916 rtlexp = make_node (RTL_EXPR);
3917 RTL_EXPR_RTL (rtlexp) = tramp;
3918 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
3919 pop_obstacks ();
3920 }
3921 else
3922 {
3923 /* Make the RTL_EXPR node temporary, not momentary, so that the
3924 trampoline_list doesn't become garbage. */
3925 int momentary = suspend_momentary ();
3926 rtlexp = make_node (RTL_EXPR);
3927 resume_momentary (momentary);
3928
3929 RTL_EXPR_RTL (rtlexp) = tramp;
3930 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
3931 }
3932
3933 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
3934 return round_trampoline_addr (tramp);
3935 }
3936
3937 /* Given a trampoline address,
3938 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
3939
3940 static rtx
3941 round_trampoline_addr (tramp)
3942 rtx tramp;
3943 {
3944 #ifdef TRAMPOLINE_ALIGNMENT
3945 /* Round address up to desired boundary. */
3946 rtx temp = gen_reg_rtx (Pmode);
3947 temp = expand_binop (Pmode, add_optab, tramp,
3948 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
3949 temp, 0, OPTAB_LIB_WIDEN);
3950 tramp = expand_binop (Pmode, and_optab, temp,
3951 GEN_INT (- TRAMPOLINE_ALIGNMENT),
3952 temp, 0, OPTAB_LIB_WIDEN);
3953 #endif
3954 return tramp;
3955 }
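/* For instance, with TRAMPOLINE_ALIGNMENT == 16 this computes
   (tramp + 15) & -16, so an address of 0x1003 rounds up to 0x1010;
   -16 is simply the two's-complement form of the mask ~(16 - 1).  */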
3956 \f
3957 /* The functions identify_blocks and reorder_blocks provide a way to
3958 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
3959 duplicate portions of the RTL code. Call identify_blocks before
3960 changing the RTL, and call reorder_blocks after. */
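/* A typical use looks like this (a sketch of the calling protocol,
   not code taken from any particular pass):

	tree *vec = identify_blocks (top_block, insns);
	... reorder or duplicate portions of the insn chain ...
	top_block = reorder_blocks (vec, top_block, insns);
   */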
3961
3962 static int all_blocks ();
3963 static tree blocks_nreverse ();
3964
3965 /* Put all this function's BLOCK nodes into a vector, and return it.
3966 Also store in each NOTE for the beginning or end of a block
3967 the index of that block in the vector.
3968 The arguments are TOP_BLOCK, the top-level block of the function,
3969 and INSNS, the insn chain of the function. */
3970
3971 tree *
3972 identify_blocks (top_block, insns)
3973 tree top_block;
3974 rtx insns;
3975 {
3976 int n_blocks;
3977 tree *block_vector;
3978 int *block_stack;
3979 int depth = 0;
3980 int next_block_number = 0;
3981 int current_block_number = 0;
3982 rtx insn;
3983
3984 if (top_block == 0)
3985 return 0;
3986
3987 n_blocks = all_blocks (top_block, 0);
3988 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
3989 block_stack = (int *) alloca (n_blocks * sizeof (int));
3990
3991 all_blocks (top_block, block_vector);
3992
3993 for (insn = insns; insn; insn = NEXT_INSN (insn))
3994 if (GET_CODE (insn) == NOTE)
3995 {
3996 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3997 {
3998 block_stack[depth++] = current_block_number;
3999 current_block_number = next_block_number;
4000 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4001 }
4002 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4003 {
4004 current_block_number = block_stack[--depth];
4005 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4006 }
4007 }
4008
4009 return block_vector;
4010 }
4011
4012 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4013 and a revised instruction chain, rebuild the tree structure
4014 of BLOCK nodes to correspond to the new order of RTL.
4015 The new block tree is inserted below TOP_BLOCK.
4016 Returns the current top-level block. */
4017
4018 tree
4019 reorder_blocks (block_vector, top_block, insns)
4020 tree *block_vector;
4021 tree top_block;
4022 rtx insns;
4023 {
4024 tree current_block = top_block;
4025 rtx insn;
4026
4027 if (block_vector == 0)
4028 return top_block;
4029
4030 /* Prune the old tree away, so that it doesn't get in the way. */
4031 BLOCK_SUBBLOCKS (current_block) = 0;
4032
4033 for (insn = insns; insn; insn = NEXT_INSN (insn))
4034 if (GET_CODE (insn) == NOTE)
4035 {
4036 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4037 {
4038 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4039 /* If we have seen this block before, copy it. */
4040 if (TREE_ASM_WRITTEN (block))
4041 block = copy_node (block);
4042 BLOCK_SUBBLOCKS (block) = 0;
4043 TREE_ASM_WRITTEN (block) = 1;
4044 BLOCK_SUPERCONTEXT (block) = current_block;
4045 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4046 BLOCK_SUBBLOCKS (current_block) = block;
4047 current_block = block;
4048 NOTE_SOURCE_FILE (insn) = 0;
4049 }
4050 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4051 {
4052 BLOCK_SUBBLOCKS (current_block)
4053 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4054 current_block = BLOCK_SUPERCONTEXT (current_block);
4055 NOTE_SOURCE_FILE (insn) = 0;
4056 }
4057 }
4058
4059 return current_block;
4060 }
4061
4062 /* Reverse the order of elements in the chain T of blocks,
4063 and return the new head of the chain (old last element). */
4064
4065 static tree
4066 blocks_nreverse (t)
4067 tree t;
4068 {
4069 register tree prev = 0, decl, next;
4070 for (decl = t; decl; decl = next)
4071 {
4072 next = BLOCK_CHAIN (decl);
4073 BLOCK_CHAIN (decl) = prev;
4074 prev = decl;
4075 }
4076 return prev;
4077 }
4078
4079 /* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
4080 Also clear TREE_ASM_WRITTEN in all blocks. */
4081
4082 static int
4083 all_blocks (block, vector)
4084 tree block;
4085 tree *vector;
4086 {
4087 int n_blocks = 1;
4088 tree subblocks;
4089
4090 TREE_ASM_WRITTEN (block) = 0;
4091 /* Record this block. */
4092 if (vector)
4093 vector[0] = block;
4094
4095 /* Record the subblocks, and their subblocks. */
4096 for (subblocks = BLOCK_SUBBLOCKS (block);
4097 subblocks; subblocks = BLOCK_CHAIN (subblocks))
4098 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
4099
4100 return n_blocks;
4101 }
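/* For example, a block A whose subblock chain is B then C, where B
   itself contains D, is recorded in depth-first order: A, B, D, C.  */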
4102 \f
4103 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4104 and initialize static variables for generating RTL for the statements
4105 of the function. */
4106
4107 void
4108 init_function_start (subr, filename, line)
4109 tree subr;
4110 char *filename;
4111 int line;
4112 {
4113 char *junk;
4114
4115 init_stmt_for_function ();
4116
4117 cse_not_expected = ! optimize;
4118
4119 /* Caller save not needed yet. */
4120 caller_save_needed = 0;
4121
4122 /* No stack slots have been made yet. */
4123 stack_slot_list = 0;
4124
4125 /* There is no stack slot for handling nonlocal gotos. */
4126 nonlocal_goto_handler_slot = 0;
4127 nonlocal_goto_stack_level = 0;
4128
4129 /* No labels have been declared for nonlocal use. */
4130 nonlocal_labels = 0;
4131
4132 /* No function calls so far in this function. */
4133 function_call_count = 0;
4134
4135 /* No parm regs have been allocated.
4136 (This is important for output_inline_function.) */
4137 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4138
4139 /* Initialize the RTL mechanism. */
4140 init_emit ();
4141
4142 /* Initialize the queue of pending postincrements and postdecrements,
4143 and some other info in expr.c. */
4144 init_expr ();
4145
4146 /* We haven't done register allocation yet. */
4147 reg_renumber = 0;
4148
4149 init_const_rtx_hash_table ();
4150
4151 current_function_name = (*decl_printable_name) (subr, &junk);
4152
4153 /* Nonzero if this is a nested function that uses a static chain. */
4154
4155 current_function_needs_context
4156 = (decl_function_context (current_function_decl) != 0);
4157
4158 /* Set if a call to setjmp is seen. */
4159 current_function_calls_setjmp = 0;
4160
4161 /* Set if a call to longjmp is seen. */
4162 current_function_calls_longjmp = 0;
4163
4164 current_function_calls_alloca = 0;
4165 current_function_has_nonlocal_label = 0;
4166 current_function_contains_functions = 0;
4167
4168 current_function_returns_pcc_struct = 0;
4169 current_function_returns_struct = 0;
4170 current_function_epilogue_delay_list = 0;
4171 current_function_uses_const_pool = 0;
4172 current_function_uses_pic_offset_table = 0;
4173
4174 /* We have not yet needed to make a label to jump to for tail-recursion. */
4175 tail_recursion_label = 0;
4176
4177 /* We haven't had a need to make a save area for ap yet. */
4178
4179 arg_pointer_save_area = 0;
4180
4181 /* No stack slots allocated yet. */
4182 frame_offset = 0;
4183
4184 /* No SAVE_EXPRs in this function yet. */
4185 save_expr_regs = 0;
4186
4187 /* No RTL_EXPRs in this function yet. */
4188 rtl_expr_chain = 0;
4189
4190 /* We have not allocated any temporaries yet. */
4191 temp_slots = 0;
4192 temp_slot_level = 0;
4193
4194 /* Within the function body, compute a type's size as soon as it is laid out. */
4195 immediate_size_expand++;
4196
4197 init_pending_stack_adjust ();
4198 inhibit_defer_pop = 0;
4199
4200 current_function_outgoing_args_size = 0;
4201
4202 /* Initialize the insn lengths. */
4203 init_insn_lengths ();
4204
4205 /* Prevent ever trying to delete the first instruction of a function.
4206 Also tell final how to output a linenum before the function prologue. */
4207 emit_line_note (filename, line);
4208
4209 /* Make sure first insn is a note even if we don't want linenums.
4210 This makes sure the first insn will never be deleted.
4211 Also, final expects a note to appear there. */
4212 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4213
4214 /* Set flags used by final.c. */
4215 if (aggregate_value_p (DECL_RESULT (subr)))
4216 {
4217 #ifdef PCC_STATIC_STRUCT_RETURN
4218 current_function_returns_pcc_struct = 1;
4219 #endif
4220 current_function_returns_struct = 1;
4221 }
4222
4223 /* Warn if this value is an aggregate type,
4224 regardless of which calling convention we are using for it. */
4225 if (warn_aggregate_return
4226 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
4227 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
4228 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE
4229 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
4230 warning ("function returns an aggregate");
4231
4232 current_function_returns_pointer
4233 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
4234
4235 /* Indicate that we need to distinguish between the return value of the
4236 present function and the return value of a function being called. */
4237 rtx_equal_function_value_matters = 1;
4238
4239 /* Indicate that we have not instantiated virtual registers yet. */
4240 virtuals_instantiated = 0;
4241
4242 /* Indicate we have no need of a frame pointer yet. */
4243 frame_pointer_needed = 0;
4244
4245 /* By default assume not varargs. */
4246 current_function_varargs = 0;
4247 }
4248
4249 /* Indicate that the current function uses extra args
4250 not explicitly mentioned in the argument list in any fashion. */
4251
4252 void
4253 mark_varargs ()
4254 {
4255 current_function_varargs = 1;
4256 }
4257
4258 /* Expand a call to __main at the beginning of a possible main function. */
4259
4260 void
4261 expand_main_function ()
4262 {
4263 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
4264 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0,
4265 VOIDmode, 0);
4266 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
4267 }
4268 \f
4269 /* Start the RTL for a new function, and set variables used for
4270 emitting RTL.
4271 SUBR is the FUNCTION_DECL node.
4272 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4273 the function's parameters, which must be run at any return statement. */
4274
4275 void
4276 expand_function_start (subr, parms_have_cleanups)
4277 tree subr;
4278 int parms_have_cleanups;
4279 {
4280 register int i;
4281 tree tem;
4282 rtx last_ptr;
4283
4284 /* Make sure volatile mem refs aren't considered
4285 valid operands of arithmetic insns. */
4286 init_recog_no_volatile ();
4287
4288 /* If function gets a static chain arg, store it in the stack frame.
4289 Do this first, so it gets the first stack slot offset. */
4290 if (current_function_needs_context)
4291 {
4292 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4293 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4294 }
4295
4296 /* If the parameters of this function need cleaning up, get a label
4297 for the beginning of the code which executes those cleanups. This must
4298 be done before doing anything with return_label. */
4299 if (parms_have_cleanups)
4300 cleanup_label = gen_label_rtx ();
4301 else
4302 cleanup_label = 0;
4303
4304 /* Make the label for return statements to jump to, if this machine
4305 does not have a one-instruction return and uses an epilogue,
4306 or if it returns a structure, or if it has parm cleanups. */
4307 #ifdef HAVE_return
4308 if (cleanup_label == 0 && HAVE_return
4309 && ! current_function_returns_pcc_struct
4310 && ! (current_function_returns_struct && ! optimize))
4311 return_label = 0;
4312 else
4313 return_label = gen_label_rtx ();
4314 #else
4315 return_label = gen_label_rtx ();
4316 #endif
4317
4318 /* Initialize rtx used to return the value. */
4319 /* Do this before assign_parms so that we copy the struct value address
4320 before any library calls that assign parms might generate. */
4321
4322 /* Decide whether to return the value in memory or in a register. */
4323 if (aggregate_value_p (DECL_RESULT (subr)))
4324 {
4325 /* Returning something that won't go in a register. */
4326 register rtx value_address;
4327
4328 #ifdef PCC_STATIC_STRUCT_RETURN
4329 if (current_function_returns_pcc_struct)
4330 {
4331 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4332 value_address = assemble_static_space (size);
4333 }
4334 else
4335 #endif
4336 {
4337 /* Expect to be passed the address of a place to store the value.
4338 If it is passed as an argument, assign_parms will take care of
4339 it. */
4340 if (struct_value_incoming_rtx)
4341 {
4342 value_address = gen_reg_rtx (Pmode);
4343 emit_move_insn (value_address, struct_value_incoming_rtx);
4344 }
4345 }
4346 if (value_address)
4347 DECL_RTL (DECL_RESULT (subr))
4348 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4349 value_address);
4350 }
4351 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4352 /* If return mode is void, this decl rtl should not be used. */
4353 DECL_RTL (DECL_RESULT (subr)) = 0;
4354 else if (parms_have_cleanups)
4355 {
4356 /* If function will end with cleanup code for parms,
4357 compute the return value into a pseudo reg,
4358 which we will copy into the true return register
4359 after the cleanups are done. */
4360
4361 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
4362 #ifdef PROMOTE_FUNCTION_RETURN
4363 tree type = TREE_TYPE (DECL_RESULT (subr));
4364 int unsignedp = TREE_UNSIGNED (type);
4365
4366 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
4367 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
4368 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
4369 || TREE_CODE (type) == OFFSET_TYPE)
4370 {
4371 PROMOTE_MODE (mode, unsignedp, type);
4372 }
4373 #endif
4374
4375 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
4376 }
4377 else
4378 /* Scalar, returned in a register. */
4379 {
4380 #ifdef FUNCTION_OUTGOING_VALUE
4381 DECL_RTL (DECL_RESULT (subr))
4382 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4383 #else
4384 DECL_RTL (DECL_RESULT (subr))
4385 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4386 #endif
4387
4388 /* Mark this reg as the function's return value. */
4389 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4390 {
4391 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4392 /* Needed because we may need to move this to memory
4393 in case it's a named return value whose address is taken. */
4394 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4395 }
4396 }
4397
4398 /* Initialize rtx for parameters and local variables.
4399 In some cases this requires emitting insns. */
4400
4401 assign_parms (subr, 0);
4402
4403 /* The following was moved from init_function_start.
4404 The move is supposed to make sdb output more accurate. */
4405 /* Indicate the beginning of the function body,
4406 as opposed to parm setup. */
4407 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4408
4409 /* If doing stupid allocation, mark parms as born here. */
4410
4411 if (GET_CODE (get_last_insn ()) != NOTE)
4412 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4413 parm_birth_insn = get_last_insn ();
4414
4415 if (obey_regdecls)
4416 {
4417 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4418 use_variable (regno_reg_rtx[i]);
4419
4420 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4421 use_variable (current_function_internal_arg_pointer);
4422 }
4423
4424 /* Fetch static chain values for containing functions. */
4425 tem = decl_function_context (current_function_decl);
4426 /* If not doing stupid register allocation, then start off with the static
4427 chain pointer in a pseudo register. Otherwise, we use the stack
4428 address that was generated above. */
4429 if (tem && ! obey_regdecls)
4430 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4431 context_display = 0;
4432 while (tem)
4433 {
4434 tree rtlexp = make_node (RTL_EXPR);
4435
4436 RTL_EXPR_RTL (rtlexp) = last_ptr;
4437 context_display = tree_cons (tem, rtlexp, context_display);
4438 tem = decl_function_context (tem);
4439 if (tem == 0)
4440 break;
4441 /* Chain thru stack frames, assuming pointer to next lexical frame
4442 is found at the place we always store it. */
4443 #ifdef FRAME_GROWS_DOWNWARD
4444 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4445 #endif
4446 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4447 memory_address (Pmode, last_ptr)));
4448 }
4449
4450 /* After the display initializations is where the tail-recursion label
4451 should go, if we end up needing one. Ensure we have a NOTE here
4452 since some things (like trampolines) get placed before this. */
4453 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4454
4455 /* Evaluate now the sizes of any types declared among the arguments. */
4456 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4457 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
4458
4459 /* Make sure there is a line number after the function entry setup code. */
4460 force_next_line_note ();
4461 }
4462 \f
4463 /* Generate RTL for the end of the current function.
4464 FILENAME and LINE are the current position in the source file. */
4465
4466 /* It is up to language-specific callers to do cleanups for parameters. */
4467
4468 void
4469 expand_function_end (filename, line)
4470 char *filename;
4471 int line;
4472 {
4473 register int i;
4474 tree link;
4475
4476 static rtx initial_trampoline;
4477
4478 #ifdef NON_SAVING_SETJMP
4479 /* Don't put any variables in registers if we call setjmp
4480 on a machine that fails to restore the registers. */
4481 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4482 {
4483 setjmp_protect (DECL_INITIAL (current_function_decl));
4484 setjmp_protect_args ();
4485 }
4486 #endif
4487
4488 /* Save the argument pointer if a save area was made for it. */
4489 if (arg_pointer_save_area)
4490 {
4491 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4492 emit_insn_before (x, tail_recursion_reentry);
4493 }
4494
4495 /* Initialize any trampolines required by this function. */
4496 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4497 {
4498 tree function = TREE_PURPOSE (link);
4499 rtx context = lookup_static_chain (function);
4500 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4501 rtx seq;
4502
4503 /* First make sure this compilation has a template for
4504 initializing trampolines. */
4505 if (initial_trampoline == 0)
4506 {
4507 end_temporary_allocation ();
4508 initial_trampoline
4509 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4510 resume_temporary_allocation ();
4511 }
4512
4513 /* Generate insns to initialize the trampoline. */
4514 start_sequence ();
4515 tramp = change_address (initial_trampoline, BLKmode,
4516 round_trampoline_addr (XEXP (tramp, 0)));
4517 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4518 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4519 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4520 XEXP (DECL_RTL (function), 0), context);
4521 seq = get_insns ();
4522 end_sequence ();
4523
4524 /* Put those insns at entry to the containing function (this one). */
4525 emit_insns_before (seq, tail_recursion_reentry);
4526 }
4527 /* Clear the trampoline_list for the next function. */
4528 trampoline_list = 0;
4529
4530 #if 0 /* I think unused parms are legitimate enough. */
4531 /* Warn about unused parms. */
4532 if (warn_unused)
4533 {
4534 rtx decl;
4535
4536 for (decl = DECL_ARGUMENTS (current_function_decl);
4537 decl; decl = TREE_CHAIN (decl))
4538 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4539 warning_with_decl (decl, "unused parameter `%s'");
4540 }
4541 #endif
4542
4543 /* Delete handlers for nonlocal gotos if nothing uses them. */
4544 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4545 delete_handlers ();
4546
4547 /* End any sequences that failed to be closed due to syntax errors. */
4548 while (in_sequence_p ())
4549 end_sequence ();
4550
4551 /* Outside the function body, we can't compute a type's actual size
4552 until the next function's body starts. */
4553 immediate_size_expand--;
4554
4555 /* If doing stupid register allocation,
4556 mark register parms as dying here. */
4557
4558 if (obey_regdecls)
4559 {
4560 rtx tem;
4561 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4562 use_variable (regno_reg_rtx[i]);
4563
4564 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4565
4566 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4567 {
4568 use_variable (XEXP (tem, 0));
4569 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4570 }
4571
4572 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4573 use_variable (current_function_internal_arg_pointer);
4574 }
4575
4576 clear_pending_stack_adjust ();
4577 do_pending_stack_adjust ();
4578
4579 /* Mark the end of the function body.
4580 If control reaches this insn, the function can drop through
4581 without returning a value. */
4582 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4583
4584 /* Output a linenumber for the end of the function.
4585 SDB depends on this. */
4586 emit_line_note_force (filename, line);
4587
4588 /* Output the label for the actual return from the function,
4589 if one is expected. This happens either because a function epilogue
4590 is used instead of a return instruction, or because a return was done
4591 with a goto in order to run local cleanups, or because of pcc-style
4592 structure returning. */
4593
4594 if (return_label)
4595 emit_label (return_label);
4596
4597 /* If we had calls to alloca, and this machine needs
4598 an accurate stack pointer to exit the function,
4599 insert some code to save and restore the stack pointer. */
4600 #ifdef EXIT_IGNORE_STACK
4601 if (! EXIT_IGNORE_STACK)
4602 #endif
4603 if (current_function_calls_alloca)
4604 {
4605 rtx tem = 0;
4606
4607 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4608 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4609 }
4610
4611 /* If scalar return value was computed in a pseudo-reg,
4612 copy that to the hard return register. */
4613 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
4614 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
4615 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
4616 >= FIRST_PSEUDO_REGISTER))
4617 {
4618 rtx real_decl_result;
4619
4620 #ifdef FUNCTION_OUTGOING_VALUE
4621 real_decl_result
4622 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4623 current_function_decl);
4624 #else
4625 real_decl_result
4626 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
4627 current_function_decl);
4628 #endif
4629 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
4630 emit_move_insn (real_decl_result,
4631 DECL_RTL (DECL_RESULT (current_function_decl)));
4632 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
4633 }
4634
4635 /* If returning a structure, arrange to return the address of the value
4636 in a place where debuggers expect to find it.
4637
4638 If returning a structure PCC style,
4639 the caller also depends on this value.
4640 And current_function_returns_pcc_struct is not necessarily set. */
4641 if (current_function_returns_struct
4642 || current_function_returns_pcc_struct)
4643 {
4644 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4645 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4646 #ifdef FUNCTION_OUTGOING_VALUE
4647 rtx outgoing
4648 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4649 current_function_decl);
4650 #else
4651 rtx outgoing
4652 = FUNCTION_VALUE (build_pointer_type (type),
4653 current_function_decl);
4654 #endif
4655
4656 /* Mark this as a function return value so integrate will delete the
4657 assignment and USE below when inlining this function. */
4658 REG_FUNCTION_VALUE_P (outgoing) = 1;
4659
4660 emit_move_insn (outgoing, value_address);
4661 use_variable (outgoing);
4662 }
4663
4664 /* Output a return insn if we are using one.
4665 Otherwise, let the rtl chain end here, to drop through
4666 into the epilogue. */
4667
4668 #ifdef HAVE_return
4669 if (HAVE_return)
4670 {
4671 emit_jump_insn (gen_return ());
4672 emit_barrier ();
4673 }
4674 #endif
4675
4676 /* Fix up any gotos that jumped out to the outermost
4677 binding level of the function.
4678 Must follow emitting RETURN_LABEL. */
4679
4680 /* If you have any cleanups to do at this point,
4681 and they need to create temporary variables,
4682 then you will lose. */
4683 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
4684 }
4685 \f
4686 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
4687
4688 static int *prologue;
4689 static int *epilogue;
4690
4691 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
4692 or a single insn). */
4693
4694 static int *
4695 record_insns (insns)
4696 rtx insns;
4697 {
4698 int *vec;
4699
4700 if (GET_CODE (insns) == SEQUENCE)
4701 {
4702 int len = XVECLEN (insns, 0);
4703 vec = (int *) oballoc ((len + 1) * sizeof (int));
4704 vec[len] = 0;
4705 while (--len >= 0)
4706 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
4707 }
4708 else
4709 {
4710 vec = (int *) oballoc (2 * sizeof (int));
4711 vec[0] = INSN_UID (insns);
4712 vec[1] = 0;
4713 }
4714 return vec;
4715 }
4716
4717 /* Count how many INSN_UIDs in VEC are part of INSN (which may be a SEQUENCE). */
4718
4719 static int
4720 contains (insn, vec)
4721 rtx insn;
4722 int *vec;
4723 {
4724 register int i, j;
4725
4726 if (GET_CODE (insn) == INSN
4727 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4728 {
4729 int count = 0;
4730 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4731 for (j = 0; vec[j]; j++)
4732 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
4733 count++;
4734 return count;
4735 }
4736 else
4737 {
4738 for (j = 0; vec[j]; j++)
4739 if (INSN_UID (insn) == vec[j])
4740 return 1;
4741 }
4742 return 0;
4743 }
4744
4745 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4746 this into place with notes indicating where the prologue ends and where
4747 the epilogue begins. Update the basic block information when possible. */
4748
4749 void
4750 thread_prologue_and_epilogue_insns (f)
4751 rtx f;
4752 {
4753 #ifdef HAVE_prologue
4754 if (HAVE_prologue)
4755 {
4756 rtx head, seq, insn;
4757
4758 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
4759 prologue insns and a NOTE_INSN_PROLOGUE_END. */
4760 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
4761 seq = gen_prologue ();
4762 head = emit_insn_after (seq, f);
4763
4764 /* Include the new prologue insns in the first block. Ignore them
4765 if they form a basic block unto themselves. */
4766 if (basic_block_head && n_basic_blocks
4767 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
4768 basic_block_head[0] = NEXT_INSN (f);
4769
4770 /* Retain a map of the prologue insns. */
4771 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
4772 }
4773 else
4774 #endif
4775 prologue = 0;
4776
4777 #ifdef HAVE_epilogue
4778 if (HAVE_epilogue)
4779 {
4780 rtx insn = get_last_insn ();
4781 rtx prev = prev_nonnote_insn (insn);
4782
4783 /* If we end with a BARRIER, we don't need an epilogue. */
4784 if (! (prev && GET_CODE (prev) == BARRIER))
4785 {
4786 rtx tail, seq;
4787
4788 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
4789 the epilogue insns (this must include the jump insn that
4790 returns), USE insns at the end of a function, and a BARRIER. */
4791
4792 emit_barrier_after (insn);
4793
4794 /* Place the epilogue before the USE insns at the end of a
4795 function. */
4796 while (prev
4797 && GET_CODE (prev) == INSN
4798 && GET_CODE (PATTERN (prev)) == USE)
4799 {
4800 insn = PREV_INSN (prev);
4801 prev = prev_nonnote_insn (prev);
4802 }
4803
4804 seq = gen_epilogue ();
4805 tail = emit_jump_insn_after (seq, insn);
4806 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
4807
4808 /* Include the new epilogue insns in the last block. Ignore
4809 them if they form a basic block unto themselves. */
4810 if (basic_block_end && n_basic_blocks
4811 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
4812 basic_block_end[n_basic_blocks - 1] = tail;
4813
4814 /* Retain a map of the epilogue insns. */
4815 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
4816 return;
4817 }
4818 }
4819 #endif
4820 epilogue = 0;
4821 }
4822
4823 /* Reposition the prologue-end and epilogue-begin notes after instruction
4824 scheduling and delayed branch scheduling. */
4825
4826 void
4827 reposition_prologue_and_epilogue_notes (f)
4828 rtx f;
4829 {
4830 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
4831 /* Reposition the prologue and epilogue notes. */
4832 if (n_basic_blocks)
4833 {
4834 rtx next, prev;
4835 int len;
4836
4837 if (prologue)
4838 {
4839 register rtx insn, note = 0;
4840
4841 /* Scan from the beginning until we reach the last prologue insn.
4842 We apparently can't depend on basic_block_{head,end} after
4843 reorg has run. */
4844 for (len = 0; prologue[len]; len++)
4845 ;
4846 for (insn = f; len && insn; insn = NEXT_INSN (insn))
4847 {
4848 if (GET_CODE (insn) == NOTE)
4849 {
4850 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
4851 note = insn;
4852 }
4853 else if ((len -= contains (insn, prologue)) == 0)
4854 {
4855 /* Find the prologue-end note if we haven't already, and
4856 move it to just after the last prologue insn. */
4857 if (note == 0)
4858 {
4859 for (note = insn; note = NEXT_INSN (note);)
4860 if (GET_CODE (note) == NOTE
4861 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
4862 break;
4863 }
4864 next = NEXT_INSN (note);
4865 prev = PREV_INSN (note);
4866 if (prev)
4867 NEXT_INSN (prev) = next;
4868 if (next)
4869 PREV_INSN (next) = prev;
4870 add_insn_after (note, insn);
4871 }
4872 }
4873 }
4874
4875 if (epilogue)
4876 {
4877 register rtx insn, note = 0;
4878
4879 /* Scan from the end until we reach the first epilogue insn.
4880 We apparently can't depend on basic_block_{head,end} after
4881 reorg has run. */
4882 for (len = 0; epilogue[len]; len++)
4883 ;
4884 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
4885 {
4886 if (GET_CODE (insn) == NOTE)
4887 {
4888 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4889 note = insn;
4890 }
4891 else if ((len -= contains (insn, epilogue)) == 0)
4892 {
4893 /* Find the epilogue-begin note if we haven't already, and
4894 move it to just before the first epilogue insn. */
4895 if (note == 0)
4896 {
4897 for (note = insn; note = PREV_INSN (note);)
4898 if (GET_CODE (note) == NOTE
4899 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
4900 break;
4901 }
4902 next = NEXT_INSN (note);
4903 prev = PREV_INSN (note);
4904 if (prev)
4905 NEXT_INSN (prev) = next;
4906 if (next)
4907 PREV_INSN (next) = prev;
4908 add_insn_after (note, PREV_INSN (insn));
4909 }
4910 }
4911 }
4912 }
4913 #endif /* HAVE_prologue or HAVE_epilogue */
4914 }