/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
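
/* Illustrative only -- this worked example is not part of the original
   source.  With ALIGN == 8, FLOOR_ROUND (-9, 8) == (-9 & ~7) == -16 and
   CEIL_ROUND (-9, 8) == ((-9 + 7) & ~7) == -8, while for positive values
   CEIL_ROUND (13, 8) == 16.  Plain (-9) / 8 may round toward zero or
   toward negative infinity depending on the compiler, which is why these
   macros mask instead of dividing.  */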

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) ();
void (*restore_machine_status) ();

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
extern tree bc_runtime_type_code ();
extern rtx bc_build_calldesc ();
extern char *bc_emit_trampoline ();
extern char *bc_end_function ();
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
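
/* Illustrative sketch, not part of the original source: in a statement
   expression such as

     x = ({ struct big b = foo (); b.field; });

   (foo is a hypothetical function) the value of foo () lives in a temporary
   slot.  push_temp_slots raises temp_slot_level around the grouping, and
   preserve_temp_slots then lowers the result's slot by one level so the
   free_temp_slots call at the end of the inner statement does not release
   memory that still holds the result.  */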
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address  PROTO((rtx));
static void put_reg_into_stack  PROTO((struct function *, rtx, tree,
                                       enum machine_mode, enum machine_mode));
static void fixup_var_refs      PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement       PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1    PROTO((rtx, enum machine_mode, rtx *, rtx,
                                       struct fixup_replacement **));
static rtx fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx fixup_stack_1        PROTO((rtx, rtx));
static void optimize_bit_field  PROTO((rtx, rtx, rtx *));
static void instantiate_decls   PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl    PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers     PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below           PROTO((struct args_size *, enum machine_mode,
                                       tree));
static tree round_down          PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse     PROTO((tree));
static int all_blocks           PROTO((tree, tree *));
static int *record_insns        PROTO((rtx));
static int contains             PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
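
/* Usage sketch, not part of the original source: a caller wanting a
   word-sized slot aligned according to its mode could write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   The returned MEM addresses the frame via virtual_stack_vars_rtx until
   instantiate_virtual_regs replaces the virtual register with the real
   frame pointer offset.  */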

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2, if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Otherwise combine_temp_slots won't think that
         adjacent slots really are adjacent.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
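
/* Usage sketch, not part of the original source: expanding a call whose
   aggregate return value must live in memory might do

     rtx temp = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   and rely on the free_temp_slots call at the end of the statement to
   recycle the slot for later temporaries (KEEP == 0 here).  */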

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (rtx_equal_p (plus_constant (XEXP (p->slot, 0), p->size),
                                 XEXP (q->slot, 0)))
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    delete_q = 1;
                  }
                else if (rtx_equal_p (plus_constant (XEXP (q->slot, 0), q->size),
                                      XEXP (p->slot, 0)))
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x)
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that was previously known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher.  If X matched one of our slots,
   just mark that one.  Otherwise, we can't easily predict which it is,
   so upgrade all of them.  Kept slots need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      for (q = temp_slots; q; q = q->next)
        if (q != p && q->addr_taken && q->level == p->level)
          q->level--;

      p->level--;
      p->addr_taken = 0;
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
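
/* Usage sketch, not part of the original source: callers generally pair
   these around the expansion of one expression,

     push_temp_slots ();
     temp = expand_expr (exp, target, mode, modifier);
     preserve_temp_slots (temp);
     pop_temp_slots ();

   so temporaries made while expanding EXP die with the level unless
   preserve_temp_slots moved the one holding the result up a level.  */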
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode);
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef STACK_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 0),
                          part_type, part_mode, part_mode);
#else
      put_reg_into_stack (function, XEXP (reg, 0),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 1),
                          part_type, part_mode, part_mode);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
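
/* Illustrative only, not part of the original source: given

     int i = 0;
     int *p = &i;

   the front end may first give `i' a pseudo register; when the
   address-of is seen, put_var_into_stack rewrites DECL_RTL for `i'
   in place as a MEM, and fixup_var_refs repairs every insn already
   emitted that mentioned the old register.  */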

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (call_dest != 0 && GET_CODE (insn) == INSN
                  && reg_mentioned_p (var, PATTERN (insn))
                  && reg_mentioned_p (call_dest, PATTERN (insn)))
                {
                  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                  emit_insn_before (gen_move_insn (temp, call_dest), insn);

                  PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                call_dest, temp);
                }

              if (GET_CODE (insn) == CALL_INSN
                  && GET_CODE (PATTERN (insn)) == SET)
                call_dest = SET_DEST (PATTERN (insn));
              else if (GET_CODE (insn) == CALL_INSN
                       && GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                call_dest = 0;
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_fixup_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register or we changed the mode,
             we can leave things the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && GET_MODE (x) == promoted_mode
              && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (promoted_mode);
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_fixup_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            tem = fixup_memory_subreg (tem, insn, 1);
          tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
              /* If we have a narrower mode, we can do something.  */
              if (wanted_mode != VOIDmode
                  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
                {
                  int offset = pos / BITS_PER_UNIT;
                  rtx old_pos = XEXP (x, 2);
                  rtx newmem;

                  /* If the bytes and bits are counted differently, we
                     must adjust the offset.  */
                  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                    offset = (GET_MODE_SIZE (is_mode)
                              - GET_MODE_SIZE (wanted_mode) - offset);

                  pos %= GET_MODE_BITSIZE (wanted_mode);

                  newmem = gen_rtx (MEM, wanted_mode,
                                    plus_constant (XEXP (tem, 0), offset));
                  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
                  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
                  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);

                  /* Make the change and see if the insn remains valid.  */
                  INSN_CODE (insn) = -1;
                  XEXP (x, 0) = newmem;
                  XEXP (x, 2) = GEN_INT (pos);

                  if (recog_memoized (insn) >= 0)
                    return;

                  /* Otherwise, restore old position.  XEXP (x, 0) will be
                     restored later.  */
                  XEXP (x, 2) = old_pos;
                }
            }

          /* If we get here, the bitfield extract insn can't accept a memory
             reference.  Copy the input into a register.  */

          tem1 = gen_reg_rtx (GET_MODE (tem));
          emit_insn_before (gen_move_insn (tem1, tem), insn);
          XEXP (x, 0) = tem1;
          return;
        }
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
        {
          /* If this is a special SUBREG made because VAR was promoted
             from a wider mode, replace it with VAR and call ourself
             recursively, this time saying that the object previously
             had its current mode (by virtue of the SUBREG).  */

          if (SUBREG_PROMOTED_VAR_P (x))
            {
              *loc = var;
              fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
              return;
            }

          /* If this SUBREG makes VAR wider, it has become a paradoxical
             SUBREG with VAR in memory, but these aren't allowed at this
             stage of the compilation.  So load VAR into a pseudo and take
             a SUBREG of that pseudo.  */
          if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
            {
              replacement = find_fixup_replacement (replacements, var);
              if (replacement->new == 0)
                replacement->new = gen_reg_rtx (GET_MODE (var));
              SUBREG_REG (x) = replacement->new;
              return;
            }

          /* See if we have already found a replacement for this SUBREG.
             If so, use it.  Otherwise, make a MEM and see if the insn
             is recognized.  If not, or if we should force MEM into a register,
             make a pseudo for this SUBREG.  */
          replacement = find_fixup_replacement (replacements, x);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          replacement->new = *loc = fixup_memory_subreg (x, insn, 0);

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
          return;
        }
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
          || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1796 optimize_bit_field (x, insn, 0);
1797 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1798 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1799 optimize_bit_field (x, insn, NULL_PTR);
1800
1801 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1802 insn into a pseudo and store the low part of the pseudo into VAR. */
1803 if (GET_CODE (SET_DEST (x)) == SUBREG
1804 && SUBREG_REG (SET_DEST (x)) == var
1805 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1806 > GET_MODE_SIZE (GET_MODE (var))))
1807 {
1808 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1809 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1810 tem)),
1811 insn);
1812 break;
1813 }
1814
1815 {
1816 rtx dest = SET_DEST (x);
1817 rtx src = SET_SRC (x);
1818 rtx outerdest = dest;
1819
1820 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1821 || GET_CODE (dest) == SIGN_EXTRACT
1822 || GET_CODE (dest) == ZERO_EXTRACT)
1823 dest = XEXP (dest, 0);
1824
1825 if (GET_CODE (src) == SUBREG)
1826 src = XEXP (src, 0);
1827
1828 /* If VAR does not appear at the top level of the SET
1829 just scan the lower levels of the tree. */
1830
1831 if (src != var && dest != var)
1832 break;
1833
1834 /* We will need to rerecognize this insn. */
1835 INSN_CODE (insn) = -1;
1836
1837 #ifdef HAVE_insv
1838 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1839 {
1840 /* Since this case will return, ensure we fixup all the
1841 operands here. */
1842 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1843 insn, replacements);
1844 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1845 insn, replacements);
1846 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1847 insn, replacements);
1848
1849 tem = XEXP (outerdest, 0);
1850
1851 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1852 that may appear inside a ZERO_EXTRACT.
1853 This was legitimate when the MEM was a REG. */
1854 if (GET_CODE (tem) == SUBREG
1855 && SUBREG_REG (tem) == var)
1856 tem = fixup_memory_subreg (tem, insn, 1);
1857 else
1858 tem = fixup_stack_1 (tem, insn);
1859
1860 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1861 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1862 && ! mode_dependent_address_p (XEXP (tem, 0))
1863 && ! MEM_VOLATILE_P (tem))
1864 {
1865 enum machine_mode wanted_mode
1866 = insn_operand_mode[(int) CODE_FOR_insv][0];
1867 enum machine_mode is_mode = GET_MODE (tem);
1868 int width = INTVAL (XEXP (outerdest, 1));
1869 int pos = INTVAL (XEXP (outerdest, 2));
1870
1871 /* If we have a narrower mode, we can do something. */
1872 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1873 {
1874 int offset = pos / BITS_PER_UNIT;
1875 rtx old_pos = XEXP (outerdest, 2);
1876 rtx newmem;
1877
1878 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1879 offset = (GET_MODE_SIZE (is_mode)
1880 - GET_MODE_SIZE (wanted_mode) - offset);
1881
1882 pos %= GET_MODE_BITSIZE (wanted_mode);
1883
1884 newmem = gen_rtx (MEM, wanted_mode,
1885 plus_constant (XEXP (tem, 0), offset));
1886 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1887 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1888 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1889
1890 /* Make the change and see if the insn remains valid. */
1891 INSN_CODE (insn) = -1;
1892 XEXP (outerdest, 0) = newmem;
1893 XEXP (outerdest, 2) = GEN_INT (pos);
1894
1895 if (recog_memoized (insn) >= 0)
1896 return;
1897
1898 /* Otherwise, restore the old position. XEXP (outerdest, 0) will be
1899 restored later. */
1900 XEXP (outerdest, 2) = old_pos;
1901 }
1902 }
1903
1904 /* If we get here, the bit-field store doesn't allow memory
1905 or isn't located at a constant position. Load the value into
1906 a register, do the store, and put it back into memory. */
1907
1908 tem1 = gen_reg_rtx (GET_MODE (tem));
1909 emit_insn_before (gen_move_insn (tem1, tem), insn);
1910 emit_insn_after (gen_move_insn (tem, tem1), insn);
1911 XEXP (outerdest, 0) = tem1;
1912 return;
1913 }
1914 #endif
1915
1916 /* STRICT_LOW_PART is a no-op on memory references
1917 and it can cause combinations to be unrecognizable,
1918 so eliminate it. */
1919
1920 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
1921 SET_DEST (x) = XEXP (SET_DEST (x), 0);
1922
1923 /* A valid insn to copy VAR into or out of a register
1924 must be left alone, to avoid an infinite loop here.
1925 If the reference to VAR is by a subreg, fix that up,
1926 since SUBREG is not valid for a memref.
1927 Also fix up the address of the stack slot.
1928
1929 Note that we must not try to recognize the insn until
1930 after we know that we have valid addresses and no
1931 (subreg (mem ...) ...) constructs, since these interfere
1932 with determining the validity of the insn. */
1933
1934 if ((SET_SRC (x) == var
1935 || (GET_CODE (SET_SRC (x)) == SUBREG
1936 && SUBREG_REG (SET_SRC (x)) == var))
1937 && (GET_CODE (SET_DEST (x)) == REG
1938 || (GET_CODE (SET_DEST (x)) == SUBREG
1939 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1940 && x == single_set (PATTERN (insn)))
1941 {
1942 rtx pat;
1943
1944 replacement = find_fixup_replacement (replacements, SET_SRC (x));
1945 if (replacement->new)
1946 SET_SRC (x) = replacement->new;
1947 else if (GET_CODE (SET_SRC (x)) == SUBREG)
1948 SET_SRC (x) = replacement->new
1949 = fixup_memory_subreg (SET_SRC (x), insn, 0);
1950 else
1951 SET_SRC (x) = replacement->new
1952 = fixup_stack_1 (SET_SRC (x), insn);
1953
1954 if (recog_memoized (insn) >= 0)
1955 return;
1956
1957 /* INSN is not valid, but we know that we want to
1958 copy SET_SRC (x) to SET_DEST (x) in some way. So
1959 we generate the move and see whether it requires more
1960 than one insn. If it does, we emit those insns and
1961 delete INSN. Otherwise, we an just replace the pattern
1962 of INSN; we have already verified above that INSN has
1963 no other function that to do X. */
1964
1965 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
1966 if (GET_CODE (pat) == SEQUENCE)
1967 {
1968 emit_insn_after (pat, insn);
1969 PUT_CODE (insn, NOTE);
1970 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1971 NOTE_SOURCE_FILE (insn) = 0;
1972 }
1973 else
1974 PATTERN (insn) = pat;
1975
1976 return;
1977 }
1978
1979 if ((SET_DEST (x) == var
1980 || (GET_CODE (SET_DEST (x)) == SUBREG
1981 && SUBREG_REG (SET_DEST (x)) == var))
1982 && (GET_CODE (SET_SRC (x)) == REG
1983 || (GET_CODE (SET_SRC (x)) == SUBREG
1984 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1985 && x == single_set (PATTERN (insn)))
1986 {
1987 rtx pat;
1988
1989 if (GET_CODE (SET_DEST (x)) == SUBREG)
1990 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
1991 else
1992 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
1993
1994 if (recog_memoized (insn) >= 0)
1995 return;
1996
1997 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
1998 if (GET_CODE (pat) == SEQUENCE)
1999 {
2000 emit_insn_after (pat, insn);
2001 PUT_CODE (insn, NOTE);
2002 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2003 NOTE_SOURCE_FILE (insn) = 0;
2004 }
2005 else
2006 PATTERN (insn) = pat;
2007
2008 return;
2009 }
2010
2011 /* Otherwise, storing into VAR must be handled specially
2012 by storing into a temporary and copying that into VAR
2013 with a new insn after this one. Note that this case
2014 will be used when storing into a promoted scalar since
2015 the insn will now have different modes on the input
2016 and output and hence will be invalid (except for the case
2017 of setting it to a constant, which does not need any
2018 change if it is valid). We generate extra code in that case,
2019 but combine.c will eliminate it. */
2020
2021 if (dest == var)
2022 {
2023 rtx temp;
2024 rtx fixeddest = SET_DEST (x);
2025
2026 /* A STRICT_LOW_PART around a MEM can be discarded. */
2027 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2028 fixeddest = XEXP (fixeddest, 0);
2029 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2030 if (GET_CODE (fixeddest) == SUBREG)
2031 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2032 else
2033 fixeddest = fixup_stack_1 (fixeddest, insn);
2034
2035 temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
2036 ? GET_MODE (fixeddest)
2037 : GET_MODE (SET_SRC (x)));
2038
2039 emit_insn_after (gen_move_insn (fixeddest,
2040 gen_lowpart (GET_MODE (fixeddest),
2041 temp)),
2042 insn);
2043
2044 SET_DEST (x) = temp;
2045 }
2046 }
2047 }
2048
2049 /* Nothing special about this RTX; fix its operands. */
2050
2051 fmt = GET_RTX_FORMAT (code);
2052 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2053 {
2054 if (fmt[i] == 'e')
2055 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2056 if (fmt[i] == 'E')
2057 {
2058 register int j;
2059 for (j = 0; j < XVECLEN (x, i); j++)
2060 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2061 insn, replacements);
2062 }
2063 }
2064 }
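/* A worked example (hypothetical insn): if VAR is the stack slot
   (mem:SI (plus:SI (reg fp) (const_int -8))) and

       (set (reg:SI 117) (plus:SI VAR VAR))

   is not recognizable with the MEMs in place, the first occurrence of
   VAR allocates a fresh pseudo and records it in the REPLACEMENTS list;
   the second occurrence finds that entry and reuses the same pseudo, so
   any MATCH_DUP in the insn pattern stays satisfied.
   fixup_var_refs_insns then copies VAR into that pseudo.  */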
2065 \f
2066 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2067 return an rtx (MEM:m1 newaddr) which is equivalent.
2068 If any insns must be emitted to compute NEWADDR, put them before INSN.
2069
2070 UNCRITICAL nonzero means accept paradoxical subregs.
2071 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
2072
2073 static rtx
2074 fixup_memory_subreg (x, insn, uncritical)
2075 rtx x;
2076 rtx insn;
2077 int uncritical;
2078 {
2079 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2080 rtx addr = XEXP (SUBREG_REG (x), 0);
2081 enum machine_mode mode = GET_MODE (x);
2082 rtx saved, result;
2083
2084 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2085 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2086 && ! uncritical)
2087 abort ();
2088
2089 if (BYTES_BIG_ENDIAN)
2090 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2091 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2092 addr = plus_constant (addr, offset);
2093 if (!flag_force_addr && memory_address_p (mode, addr))
2094 /* Shortcut if no insns need be emitted. */
2095 return change_address (SUBREG_REG (x), mode, addr);
2096 start_sequence ();
2097 result = change_address (SUBREG_REG (x), mode, addr);
2098 emit_insn_before (gen_sequence (), insn);
2099 end_sequence ();
2100 return result;
2101 }
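/* For example, assuming a little-endian target with UNITS_PER_WORD == 4:

       (subreg:SI (mem:DI (reg:SI 100)) 1)

   refers to word 1 of the DImode memory, so OFFSET is 4 and the result is

       (mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   with any insns needed to compute the new address emitted before INSN
   only when that address is not already valid.  */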
2102
2103 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2104 Replace subexpressions of X in place.
2105 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2106 Otherwise return X, with its contents possibly altered.
2107
2108 If any insns must be emitted to compute NEWADDR, put them before INSN.
2109
2110 UNCRITICAL is as in fixup_memory_subreg. */
2111
2112 static rtx
2113 walk_fixup_memory_subreg (x, insn, uncritical)
2114 register rtx x;
2115 rtx insn;
2116 int uncritical;
2117 {
2118 register enum rtx_code code;
2119 register char *fmt;
2120 register int i;
2121
2122 if (x == 0)
2123 return 0;
2124
2125 code = GET_CODE (x);
2126
2127 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2128 return fixup_memory_subreg (x, insn, uncritical);
2129
2130 /* Nothing special about this RTX; fix its operands. */
2131
2132 fmt = GET_RTX_FORMAT (code);
2133 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2134 {
2135 if (fmt[i] == 'e')
2136 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2137 if (fmt[i] == 'E')
2138 {
2139 register int j;
2140 for (j = 0; j < XVECLEN (x, i); j++)
2141 XVECEXP (x, i, j)
2142 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2143 }
2144 }
2145 return x;
2146 }
2147 \f
2148 /* For each memory ref within X, if it refers to a stack slot
2149 with an out-of-range displacement, put the address in a temp register
2150 (emitting new insns before INSN to load these registers)
2151 and alter the memory ref to use that register.
2152 Replace each such MEM rtx with a copy, to avoid clobberage. */
2153
2154 static rtx
2155 fixup_stack_1 (x, insn)
2156 rtx x;
2157 rtx insn;
2158 {
2159 register int i;
2160 register RTX_CODE code = GET_CODE (x);
2161 register char *fmt;
2162
2163 if (code == MEM)
2164 {
2165 register rtx ad = XEXP (x, 0);
2166 /* If we have address of a stack slot but it's not valid
2167 (displacement is too large), compute the sum in a register. */
2168 if (GET_CODE (ad) == PLUS
2169 && GET_CODE (XEXP (ad, 0)) == REG
2170 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2171 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2172 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2173 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2174 {
2175 rtx temp, seq;
2176 if (memory_address_p (GET_MODE (x), ad))
2177 return x;
2178
2179 start_sequence ();
2180 temp = copy_to_reg (ad);
2181 seq = gen_sequence ();
2182 end_sequence ();
2183 emit_insn_before (seq, insn);
2184 return change_address (x, VOIDmode, temp);
2185 }
2186 return x;
2187 }
2188
2189 fmt = GET_RTX_FORMAT (code);
2190 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2191 {
2192 if (fmt[i] == 'e')
2193 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2194 if (fmt[i] == 'E')
2195 {
2196 register int j;
2197 for (j = 0; j < XVECLEN (x, i); j++)
2198 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2199 }
2200 }
2201 return x;
2202 }
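/* For example, on a hypothetical machine whose displacement field
   cannot encode 40000, the reference

       (mem:SI (plus:SI (reg virtual_stack_vars) (const_int 40000)))

   fails memory_address_p, so the sum is copied into a temporary
   register before INSN and the reference becomes (mem:SI (reg TEMP)),
   where TEMP names the new pseudo.  */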
2203 \f
2204 /* Optimization: a bit-field instruction whose field
2205 happens to be a byte or halfword in memory
2206 can be changed to a move instruction.
2207
2208 We call here when INSN is an insn to examine or store into a bit-field.
2209 BODY is the SET-rtx to be altered.
2210
2211 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2212 (Currently this is called only from function.c, and EQUIV_MEM
2213 is always 0.) */
2214
2215 static void
2216 optimize_bit_field (body, insn, equiv_mem)
2217 rtx body;
2218 rtx insn;
2219 rtx *equiv_mem;
2220 {
2221 register rtx bitfield;
2222 int destflag;
2223 rtx seq = 0;
2224 enum machine_mode mode;
2225
2226 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2227 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2228 bitfield = SET_DEST (body), destflag = 1;
2229 else
2230 bitfield = SET_SRC (body), destflag = 0;
2231
2232 /* First check that the field being stored has constant size and position
2233 and is in fact a byte or halfword suitably aligned. */
2234
2235 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2236 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2237 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2238 != BLKmode)
2239 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2240 {
2241 register rtx memref = 0;
2242
2243 /* Now check that the containing word is memory, not a register,
2244 and that it is safe to change the machine mode. */
2245
2246 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2247 memref = XEXP (bitfield, 0);
2248 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2249 && equiv_mem != 0)
2250 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2251 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2252 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2253 memref = SUBREG_REG (XEXP (bitfield, 0));
2254 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2255 && equiv_mem != 0
2256 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2257 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2258
2259 if (memref
2260 && ! mode_dependent_address_p (XEXP (memref, 0))
2261 && ! MEM_VOLATILE_P (memref))
2262 {
2263 /* Now adjust the address, first for any subreg'ing
2264 that we are now getting rid of,
2265 and then for which byte of the word is wanted. */
2266
2267 register int offset = INTVAL (XEXP (bitfield, 2));
2268 rtx insns;
2269
2270 /* Adjust OFFSET to count bits from low-address byte. */
2271 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2272 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2273 - offset - INTVAL (XEXP (bitfield, 1)));
2274
2275 /* Adjust OFFSET to count bytes from low-address byte. */
2276 offset /= BITS_PER_UNIT;
2277 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2278 {
2279 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2280 if (BYTES_BIG_ENDIAN)
2281 offset -= (MIN (UNITS_PER_WORD,
2282 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2283 - MIN (UNITS_PER_WORD,
2284 GET_MODE_SIZE (GET_MODE (memref))));
2285 }
2286
2287 start_sequence ();
2288 memref = change_address (memref, mode,
2289 plus_constant (XEXP (memref, 0), offset));
2290 insns = get_insns ();
2291 end_sequence ();
2292 emit_insns_before (insns, insn);
2293
2294 /* Store this memory reference where
2295 we found the bit field reference. */
2296
2297 if (destflag)
2298 {
2299 validate_change (insn, &SET_DEST (body), memref, 1);
2300 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2301 {
2302 rtx src = SET_SRC (body);
2303 while (GET_CODE (src) == SUBREG
2304 && SUBREG_WORD (src) == 0)
2305 src = SUBREG_REG (src);
2306 if (GET_MODE (src) != GET_MODE (memref))
2307 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2308 validate_change (insn, &SET_SRC (body), src, 1);
2309 }
2310 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2311 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2312 /* This shouldn't happen because anything that didn't have
2313 one of these modes should have been converted explicitly
2314 and then referenced through a subreg.
2315 This is so because the original bit-field was
2316 handled by agg_mode and so its tree structure had
2317 the same mode that memref now has. */
2318 abort ();
2319 }
2320 else
2321 {
2322 rtx dest = SET_DEST (body);
2323
2324 while (GET_CODE (dest) == SUBREG
2325 && SUBREG_WORD (dest) == 0
2326 && (GET_MODE_CLASS (GET_MODE (dest))
2327 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2328 dest = SUBREG_REG (dest);
2329
2330 validate_change (insn, &SET_DEST (body), dest, 1);
2331
2332 if (GET_MODE (dest) == GET_MODE (memref))
2333 validate_change (insn, &SET_SRC (body), memref, 1);
2334 else
2335 {
2336 /* Convert the mem ref to the destination mode. */
2337 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2338
2339 start_sequence ();
2340 convert_move (newreg, memref,
2341 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2342 seq = get_insns ();
2343 end_sequence ();
2344
2345 validate_change (insn, &SET_SRC (body), newreg, 1);
2346 }
2347 }
2348
2349 /* See if we can convert this extraction or insertion into
2350 a simple move insn. We might not be able to do so if this
2351 was, for example, part of a PARALLEL.
2352
2353 If we succeed, write out any needed conversions. If we fail,
2354 it is hard to guess why we failed, so don't do anything
2355 special; just let the optimization be suppressed. */
2356
2357 if (apply_change_group () && seq)
2358 emit_insns_before (seq, insn);
2359 }
2360 }
2361 }
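/* A worked example, assuming BYTES_BIG_ENDIAN and BITS_BIG_ENDIAN are
   both zero and QImode is the 8-bit integer mode:

       (set (reg:SI 117)
            (zero_extract:SI (mem:SI (reg:SI 100))
                             (const_int 8) (const_int 8)))

   extracts an aligned byte, so MEMREF becomes
   (mem:QI (plus:SI (reg:SI 100) (const_int 1))); convert_move emits a
   zero extension of that byte into a new pseudo before INSN, leaving
   INSN itself as a simple register move.  */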
2362 \f
2363 /* These routines are responsible for converting virtual register references
2364 to the actual hard register references once RTL generation is complete.
2365
2366 The following four variables are used for communication between the
2367 routines. They contain the offsets of the virtual registers from their
2368 respective hard registers. */
2369
2370 static int in_arg_offset;
2371 static int var_offset;
2372 static int dynamic_offset;
2373 static int out_arg_offset;
2374
2375 /* In most machines, the stack pointer register is equivalent to the bottom
2376 of the stack. */
2377
2378 #ifndef STACK_POINTER_OFFSET
2379 #define STACK_POINTER_OFFSET 0
2380 #endif
2381
2382 /* If not defined, pick an appropriate default for the offset of dynamically
2383 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2384 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2385
2386 #ifndef STACK_DYNAMIC_OFFSET
2387
2388 #ifdef ACCUMULATE_OUTGOING_ARGS
2389 /* The bottom of the stack points to the actual arguments. If
2390 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2391 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2392 stack space for register parameters is not pushed by the caller, but
2393 is rather part of the fixed stack areas and hence not included in
2394 `current_function_outgoing_args_size'. Nevertheless, we must allow
2395 for it when allocating stack dynamic objects. */
2396
2397 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2398 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2399 (current_function_outgoing_args_size \
2400 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2401
2402 #else
2403 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2404 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2405 #endif
2406
2407 #else
2408 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2409 #endif
2410 #endif
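/* As a worked example (hypothetical target): with
   ACCUMULATE_OUTGOING_ARGS and REG_PARM_STACK_SPACE defined,
   OUTGOING_REG_PARM_STACK_SPACE not defined,
   current_function_outgoing_args_size == 32,
   REG_PARM_STACK_SPACE (FNDECL) == 16 and STACK_POINTER_OFFSET == 0,
   dynamically allocated objects start 48 bytes above the stack
   pointer.  */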
2411
2412 /* Pass through the INSNS of function FNDECL and convert virtual register
2413 references to hard register references. */
2414
2415 void
2416 instantiate_virtual_regs (fndecl, insns)
2417 tree fndecl;
2418 rtx insns;
2419 {
2420 rtx insn;
2421
2422 /* Compute the offsets to use for this function. */
2423 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2424 var_offset = STARTING_FRAME_OFFSET;
2425 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2426 out_arg_offset = STACK_POINTER_OFFSET;
2427
2428 /* Scan all variables and parameters of this function. For each that is
2429 in memory, instantiate all virtual registers if the result is a valid
2430 address. If not, we do it later. That will handle most uses of virtual
2431 regs on many machines. */
2432 instantiate_decls (fndecl, 1);
2433
2434 /* Initialize recognition, indicating that volatile is OK. */
2435 init_recog ();
2436
2437 /* Scan through all the insns, instantiating every virtual register still
2438 present. */
2439 for (insn = insns; insn; insn = NEXT_INSN (insn))
2440 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2441 || GET_CODE (insn) == CALL_INSN)
2442 {
2443 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2444 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2445 }
2446
2447 /* Now instantiate the remaining register equivalences for debugging info.
2448 These will not be valid addresses. */
2449 instantiate_decls (fndecl, 0);
2450
2451 /* Indicate that, from now on, assign_stack_local should use
2452 frame_pointer_rtx. */
2453 virtuals_instantiated = 1;
2454 }
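/* In summary, the four virtual registers are replaced as follows:

       virtual_incoming_args_rtx  ->  arg_pointer_rtx   + FIRST_PARM_OFFSET (fndecl)
       virtual_stack_vars_rtx     ->  frame_pointer_rtx + STARTING_FRAME_OFFSET
       virtual_stack_dynamic_rtx  ->  stack_pointer_rtx + STACK_DYNAMIC_OFFSET (fndecl)
       virtual_outgoing_args_rtx  ->  stack_pointer_rtx + STACK_POINTER_OFFSET  */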
2455
2456 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2457 all virtual registers in their DECL_RTL's.
2458
2459 If VALID_ONLY, do this only if the resulting address is still valid.
2460 Otherwise, always do it. */
2461
2462 static void
2463 instantiate_decls (fndecl, valid_only)
2464 tree fndecl;
2465 int valid_only;
2466 {
2467 tree decl;
2468
2469 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2470 /* When compiling an inline function, the obstack used for
2471 rtl allocation is the maybepermanent_obstack. Calling
2472 `resume_temporary_allocation' switches us back to that
2473 obstack while we process this function's parameters. */
2474 resume_temporary_allocation ();
2475
2476 /* Process all parameters of the function. */
2477 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2478 {
2479 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2480 valid_only);
2481 instantiate_decl (DECL_INCOMING_RTL (decl),
2482 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2483 }
2484
2485 /* Now process all variables defined in the function or its subblocks. */
2486 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2487
2488 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2489 {
2490 /* Save all rtl allocated for this function by raising the
2491 high-water mark on the maybepermanent_obstack. */
2492 preserve_data ();
2493 /* All further rtl allocation is now done in the current_obstack. */
2494 rtl_in_current_obstack ();
2495 }
2496 }
2497
2498 /* Subroutine of instantiate_decls: Process all decls in the given
2499 BLOCK node and all its subblocks. */
2500
2501 static void
2502 instantiate_decls_1 (let, valid_only)
2503 tree let;
2504 int valid_only;
2505 {
2506 tree t;
2507
2508 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2509 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2510 valid_only);
2511
2512 /* Process all subblocks. */
2513 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2514 instantiate_decls_1 (t, valid_only);
2515 }
2516
2517 /* Subroutine of the preceding procedures: Given RTL representing a
2518 decl and the size of the object, do any instantiation required.
2519
2520 If VALID_ONLY is non-zero, it means that the RTL should only be
2521 changed if the new address is valid. */
2522
2523 static void
2524 instantiate_decl (x, size, valid_only)
2525 rtx x;
2526 int size;
2527 int valid_only;
2528 {
2529 enum machine_mode mode;
2530 rtx addr;
2531
2532 /* If this is not a MEM, no need to do anything. Similarly if the
2533 address is a constant or a register that is not a virtual register. */
2534
2535 if (x == 0 || GET_CODE (x) != MEM)
2536 return;
2537
2538 addr = XEXP (x, 0);
2539 if (CONSTANT_P (addr)
2540 || (GET_CODE (addr) == REG
2541 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2542 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2543 return;
2544
2545 /* If we should do this only when the address is valid, copy the address.
2546 We need to do this so we can undo any changes that might make the
2547 address invalid. This copy is unfortunate, but probably can't be
2548 avoided. */
2549
2550 if (valid_only)
2551 addr = copy_rtx (addr);
2552
2553 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2554
2555 if (! valid_only)
2556 return;
2557
2558 /* Now verify that the resulting address is valid for every integer or
2559 floating-point mode up to and including SIZE bytes long. We do this
2560 since the object might be accessed in any mode and frame addresses
2561 are shared. */
2562
2563 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2564 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2565 mode = GET_MODE_WIDER_MODE (mode))
2566 if (! memory_address_p (mode, addr))
2567 return;
2568
2569 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2570 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2571 mode = GET_MODE_WIDER_MODE (mode))
2572 if (! memory_address_p (mode, addr))
2573 return;
2574
2575 /* Otherwise, put back the address, now that we have updated it and we
2576 know it is valid. */
2577
2578 XEXP (x, 0) = addr;
2579 }
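/* For example, with SIZE == 8 on a typical 32-bit target, the address
   must be valid in QImode, HImode, SImode and DImode as well as in
   SFmode and DFmode before it is stored back into the MEM.  */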
2580 \f
2581 /* Given a pointer to a piece of rtx and an optional pointer to the
2582 containing object, instantiate any virtual registers present in it.
2583
2584 If EXTRA_INSNS, we always do the replacement and generate
2585 any extra insns before OBJECT. If it is zero, we do nothing if the
2586 replacement is not valid.
2587
2588 Return 1 if we either had nothing to do or if we were able to do the
2589 needed replacement. Return 0 otherwise; we only return zero if
2590 EXTRA_INSNS is zero.
2591
2592 We first try some simple transformations to avoid the creation of extra
2593 pseudos. */
2594
2595 static int
2596 instantiate_virtual_regs_1 (loc, object, extra_insns)
2597 rtx *loc;
2598 rtx object;
2599 int extra_insns;
2600 {
2601 rtx x;
2602 RTX_CODE code;
2603 rtx new = 0;
2604 int offset;
2605 rtx temp;
2606 rtx seq;
2607 int i, j;
2608 char *fmt;
2609
2610 /* Re-start here to avoid recursion in common cases. */
2611 restart:
2612
2613 x = *loc;
2614 if (x == 0)
2615 return 1;
2616
2617 code = GET_CODE (x);
2618
2619 /* Check for some special cases. */
2620 switch (code)
2621 {
2622 case CONST_INT:
2623 case CONST_DOUBLE:
2624 case CONST:
2625 case SYMBOL_REF:
2626 case CODE_LABEL:
2627 case PC:
2628 case CC0:
2629 case ASM_INPUT:
2630 case ADDR_VEC:
2631 case ADDR_DIFF_VEC:
2632 case RETURN:
2633 return 1;
2634
2635 case SET:
2636 /* We are allowed to set the virtual registers. This means that
2637 the actual register should receive the source minus the
2638 appropriate offset. This is used, for example, in the handling
2639 of non-local gotos. */
2640 if (SET_DEST (x) == virtual_incoming_args_rtx)
2641 new = arg_pointer_rtx, offset = - in_arg_offset;
2642 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2643 new = frame_pointer_rtx, offset = - var_offset;
2644 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2645 new = stack_pointer_rtx, offset = - dynamic_offset;
2646 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2647 new = stack_pointer_rtx, offset = - out_arg_offset;
2648
2649 if (new)
2650 {
2651 /* The only valid sources here are PLUS or REG. Just do
2652 the simplest possible thing to handle them. */
2653 if (GET_CODE (SET_SRC (x)) != REG
2654 && GET_CODE (SET_SRC (x)) != PLUS)
2655 abort ();
2656
2657 start_sequence ();
2658 if (GET_CODE (SET_SRC (x)) != REG)
2659 temp = force_operand (SET_SRC (x), NULL_RTX);
2660 else
2661 temp = SET_SRC (x);
2662 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2663 seq = get_insns ();
2664 end_sequence ();
2665
2666 emit_insns_before (seq, object);
2667 SET_DEST (x) = new;
2668
2669 if (!validate_change (object, &SET_SRC (x), temp, 0)
2670 || ! extra_insns)
2671 abort ();
2672
2673 return 1;
2674 }
2675
2676 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2677 loc = &SET_SRC (x);
2678 goto restart;
2679
2680 case PLUS:
2681 /* Handle special case of virtual register plus constant. */
2682 if (CONSTANT_P (XEXP (x, 1)))
2683 {
2684 rtx old, new_offset;
2685
2686 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2687 if (GET_CODE (XEXP (x, 0)) == PLUS)
2688 {
2689 rtx inner = XEXP (XEXP (x, 0), 0);
2690
2691 if (inner == virtual_incoming_args_rtx)
2692 new = arg_pointer_rtx, offset = in_arg_offset;
2693 else if (inner == virtual_stack_vars_rtx)
2694 new = frame_pointer_rtx, offset = var_offset;
2695 else if (inner == virtual_stack_dynamic_rtx)
2696 new = stack_pointer_rtx, offset = dynamic_offset;
2697 else if (inner == virtual_outgoing_args_rtx)
2698 new = stack_pointer_rtx, offset = out_arg_offset;
2699 else
2700 {
2701 loc = &XEXP (x, 0);
2702 goto restart;
2703 }
2704
2705 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2706 extra_insns);
2707 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2708 }
2709
2710 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2711 new = arg_pointer_rtx, offset = in_arg_offset;
2712 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2713 new = frame_pointer_rtx, offset = var_offset;
2714 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2715 new = stack_pointer_rtx, offset = dynamic_offset;
2716 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2717 new = stack_pointer_rtx, offset = out_arg_offset;
2718 else
2719 {
2720 /* We know the second operand is a constant. Unless the
2721 first operand is a REG (which has already been checked),
2722 it needs to be checked. */
2723 if (GET_CODE (XEXP (x, 0)) != REG)
2724 {
2725 loc = &XEXP (x, 0);
2726 goto restart;
2727 }
2728 return 1;
2729 }
2730
2731 new_offset = plus_constant (XEXP (x, 1), offset);
2732
2733 /* If the new constant is zero, try to replace the sum with just
2734 the register. */
2735 if (new_offset == const0_rtx
2736 && validate_change (object, loc, new, 0))
2737 return 1;
2738
2739 /* Next try to replace the register and new offset.
2740 There are two changes to validate here and we can't assume that
2741 when the old offset equals the new one, just changing the register
2742 will yield a valid insn. In the interests of a little efficiency,
2743 however, we only call validate change once (we don't queue up the
2744 changes and then call apply_change_group). */
2745
2746 old = XEXP (x, 0);
2747 if (offset == 0
2748 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2749 : (XEXP (x, 0) = new,
2750 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2751 {
2752 if (! extra_insns)
2753 {
2754 XEXP (x, 0) = old;
2755 return 0;
2756 }
2757
2758 /* Otherwise copy the new constant into a register and replace
2759 the constant with that register. */
2760 temp = gen_reg_rtx (Pmode);
2761 XEXP (x, 0) = new;
2762 if (validate_change (object, &XEXP (x, 1), temp, 0))
2763 emit_insn_before (gen_move_insn (temp, new_offset), object);
2764 else
2765 {
2766 /* If that didn't work, replace this expression with a
2767 register containing the sum. */
2768
2769 XEXP (x, 0) = old;
2770 new = gen_rtx (PLUS, Pmode, new, new_offset);
2771
2772 start_sequence ();
2773 temp = force_operand (new, NULL_RTX);
2774 seq = get_insns ();
2775 end_sequence ();
2776
2777 emit_insns_before (seq, object);
2778 if (! validate_change (object, loc, temp, 0)
2779 && ! validate_replace_rtx (x, temp, object))
2780 abort ();
2781 }
2782 }
2783
2784 return 1;
2785 }
2786
2787 /* Fall through to generic two-operand expression case. */
2788 case EXPR_LIST:
2789 case CALL:
2790 case COMPARE:
2791 case MINUS:
2792 case MULT:
2793 case DIV: case UDIV:
2794 case MOD: case UMOD:
2795 case AND: case IOR: case XOR:
2796 case ROTATERT: case ROTATE:
2797 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2798 case NE: case EQ:
2799 case GE: case GT: case GEU: case GTU:
2800 case LE: case LT: case LEU: case LTU:
2801 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2802 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2803 loc = &XEXP (x, 0);
2804 goto restart;
2805
2806 case MEM:
2807 /* Most cases of MEM that convert to valid addresses have already been
2808 handled by our scan of regno_reg_rtx. The only special handling we
2809 need here is to make a copy of the rtx to ensure it isn't being
2810 shared if we have to change it to a pseudo.
2811
2812 If the rtx is a simple reference to an address via a virtual register,
2813 it can potentially be shared. In such cases, first try to make it
2814 a valid address, which can also be shared. Otherwise, copy it and
2815 proceed normally.
2816
2817 First check for common cases that need no processing. These are
2818 usually due to instantiation already being done on a previous instance
2819 of a shared rtx. */
2820
2821 temp = XEXP (x, 0);
2822 if (CONSTANT_ADDRESS_P (temp)
2823 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2824 || temp == arg_pointer_rtx
2825 #endif
2826 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2827 || temp == hard_frame_pointer_rtx
2828 #endif
2829 || temp == frame_pointer_rtx)
2830 return 1;
2831
2832 if (GET_CODE (temp) == PLUS
2833 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2834 && (XEXP (temp, 0) == frame_pointer_rtx
2835 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2836 || XEXP (temp, 0) == hard_frame_pointer_rtx
2837 #endif
2838 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2839 || XEXP (temp, 0) == arg_pointer_rtx
2840 #endif
2841 ))
2842 return 1;
2843
2844 if (temp == virtual_stack_vars_rtx
2845 || temp == virtual_incoming_args_rtx
2846 || (GET_CODE (temp) == PLUS
2847 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2848 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2849 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2850 {
2851 /* This MEM may be shared. If the substitution can be done without
2852 the need to generate new pseudos, we want to do it in place
2853 so all copies of the shared rtx benefit. The call below will
2854 only make substitutions if the resulting address is still
2855 valid.
2856
2857 Note that we cannot pass X as the object in the recursive call
2858 since the insn being processed may not allow all valid
2859 addresses. However, if we were not passed an object, we can
2860 only modify X without copying it if X will have a valid
2861 address.
2862
2863 ??? Also note that this can still lose if OBJECT is an insn that
2864 has fewer restrictions on an address than some other insn.
2865 In that case, we will modify the shared address. This case
2866 doesn't seem very likely, though. */
2867
2868 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2869 object ? object : x, 0))
2870 return 1;
2871
2872 /* Otherwise make a copy and process that copy. We copy the entire
2873 RTL expression since it might be a PLUS which could also be
2874 shared. */
2875 *loc = x = copy_rtx (x);
2876 }
2877
2878 /* Fall through to generic unary operation case. */
2879 case USE:
2880 case CLOBBER:
2881 case SUBREG:
2882 case STRICT_LOW_PART:
2883 case NEG: case NOT:
2884 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2885 case SIGN_EXTEND: case ZERO_EXTEND:
2886 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2887 case FLOAT: case FIX:
2888 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2889 case ABS:
2890 case SQRT:
2891 case FFS:
2892 /* These cases either have just one operand or we know that we need not
2893 check the rest of the operands. */
2894 loc = &XEXP (x, 0);
2895 goto restart;
2896
2897 case REG:
2898 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2899 in front of this insn and substitute the temporary. */
2900 if (x == virtual_incoming_args_rtx)
2901 new = arg_pointer_rtx, offset = in_arg_offset;
2902 else if (x == virtual_stack_vars_rtx)
2903 new = frame_pointer_rtx, offset = var_offset;
2904 else if (x == virtual_stack_dynamic_rtx)
2905 new = stack_pointer_rtx, offset = dynamic_offset;
2906 else if (x == virtual_outgoing_args_rtx)
2907 new = stack_pointer_rtx, offset = out_arg_offset;
2908
2909 if (new)
2910 {
2911 temp = plus_constant (new, offset);
2912 if (!validate_change (object, loc, temp, 0))
2913 {
2914 if (! extra_insns)
2915 return 0;
2916
2917 start_sequence ();
2918 temp = force_operand (temp, NULL_RTX);
2919 seq = get_insns ();
2920 end_sequence ();
2921
2922 emit_insns_before (seq, object);
2923 if (! validate_change (object, loc, temp, 0)
2924 && ! validate_replace_rtx (x, temp, object))
2925 abort ();
2926 }
2927 }
2928
2929 return 1;
2930 }
2931
2932 /* Scan all subexpressions. */
2933 fmt = GET_RTX_FORMAT (code);
2934 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2935 if (*fmt == 'e')
2936 {
2937 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2938 return 0;
2939 }
2940 else if (*fmt == 'E')
2941 for (j = 0; j < XVECLEN (x, i); j++)
2942 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2943 extra_insns))
2944 return 0;
2945
2946 return 1;
2947 }
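/* As an example of the PLUS case above, assuming STARTING_FRAME_OFFSET
   is zero so that var_offset == 0:

       (plus:SI (reg virtual_stack_vars) (const_int 8))

   becomes (plus:SI (reg fp) (const_int 8)) when the containing insn
   accepts the change; otherwise the offset, or the whole sum, is
   computed into a fresh pseudo with insns emitted before OBJECT
   (EXTRA_INSNS permitting).  */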
2948 \f
2949 /* Optimization: assuming this function does not receive nonlocal gotos,
2950 delete the handlers for such, as well as the insns to establish
2951 and disestablish them. */
2952
2953 static void
2954 delete_handlers ()
2955 {
2956 rtx insn;
2957 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2958 {
2959 /* Delete the handler by turning off the flag that would
2960 prevent jump_optimize from deleting it.
2961 Also permit deletion of the nonlocal labels themselves
2962 if nothing local refers to them. */
2963 if (GET_CODE (insn) == CODE_LABEL)
2964 {
2965 tree t, last_t;
2966
2967 LABEL_PRESERVE_P (insn) = 0;
2968
2969 /* Remove it from the nonlocal_label list, to avoid confusing
2970 flow. */
2971 for (t = nonlocal_labels, last_t = 0; t;
2972 last_t = t, t = TREE_CHAIN (t))
2973 if (DECL_RTL (TREE_VALUE (t)) == insn)
2974 break;
2975 if (t)
2976 {
2977 if (! last_t)
2978 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
2979 else
2980 TREE_CHAIN (last_t) = TREE_CHAIN (t);
2981 }
2982 }
2983 if (GET_CODE (insn) == INSN
2984 && ((nonlocal_goto_handler_slot != 0
2985 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2986 || (nonlocal_goto_stack_level != 0
2987 && reg_mentioned_p (nonlocal_goto_stack_level,
2988 PATTERN (insn)))))
2989 delete_insn (insn);
2990 }
2991 }
2992
2993 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2994 of the current function. */
2995
2996 rtx
2997 nonlocal_label_rtx_list ()
2998 {
2999 tree t;
3000 rtx x = 0;
3001
3002 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3003 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3004
3005 return x;
3006 }
3007 \f
3008 /* Output a USE for any register use in RTL.
3009 This is used with -noreg to mark the extent of lifespan
3010 of any registers used in a user-visible variable's DECL_RTL. */
3011
3012 void
3013 use_variable (rtl)
3014 rtx rtl;
3015 {
3016 if (GET_CODE (rtl) == REG)
3017 /* This is a register variable. */
3018 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3019 else if (GET_CODE (rtl) == MEM
3020 && GET_CODE (XEXP (rtl, 0)) == REG
3021 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3022 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3023 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3024 /* This is a variable-sized structure. */
3025 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3026 }
3027
3028 /* Like use_variable except that it outputs the USEs after INSN
3029 instead of at the end of the insn-chain. */
3030
3031 void
3032 use_variable_after (rtl, insn)
3033 rtx rtl, insn;
3034 {
3035 if (GET_CODE (rtl) == REG)
3036 /* This is a register variable. */
3037 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3038 else if (GET_CODE (rtl) == MEM
3039 && GET_CODE (XEXP (rtl, 0)) == REG
3040 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3041 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3042 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3043 /* This is a variable-sized structure. */
3044 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3045 }
3046 \f
3047 int
3048 max_parm_reg_num ()
3049 {
3050 return max_parm_reg;
3051 }
3052
3053 /* Return the first insn following those generated by `assign_parms'. */
3054
3055 rtx
3056 get_first_nonparm_insn ()
3057 {
3058 if (last_parm_insn)
3059 return NEXT_INSN (last_parm_insn);
3060 return get_insns ();
3061 }
3062
3063 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3064 Crash if there is none. */
3065
3066 rtx
3067 get_first_block_beg ()
3068 {
3069 register rtx searcher;
3070 register rtx insn = get_first_nonparm_insn ();
3071
3072 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3073 if (GET_CODE (searcher) == NOTE
3074 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3075 return searcher;
3076
3077 abort (); /* Invalid call to this function. (See comments above.) */
3078 return NULL_RTX;
3079 }
3080
3081 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3082 This means a type for which function calls must pass an address to the
3083 function or get an address back from the function.
3084 EXP may be a type node or an expression (whose type is tested). */
3085
3086 int
3087 aggregate_value_p (exp)
3088 tree exp;
3089 {
3090 int i, regno, nregs;
3091 rtx reg;
3092 tree type;
3093 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3094 type = exp;
3095 else
3096 type = TREE_TYPE (exp);
3097
3098 if (RETURN_IN_MEMORY (type))
3099 return 1;
3100 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3101 return 1;
3102 /* Make sure we have suitable call-clobbered regs to return
3103 the value in; if not, we must return it in memory. */
3104 reg = hard_function_value (type, 0);
3105 regno = REGNO (reg);
3106 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3107 for (i = 0; i < nregs; i++)
3108 if (! call_used_regs[regno + i])
3109 return 1;
3110 return 0;
3111 }
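/* For example, on typical targets a `struct { char buf[64]; }' value
   answers 1 (RETURN_IN_MEMORY or the pcc struct-return convention
   applies), while a plain `int' is returned in a call-clobbered
   register and answers 0. The exact answer is target-dependent.  */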
3112 \f
3113 /* Assign RTL expressions to the function's parameters.
3114 This may involve copying them into registers and using
3115 those registers as the RTL for them.
3116
3117 If SECOND_TIME is non-zero it means that this function is being
3118 called a second time. This is done by integrate.c when a function's
3119 compilation is deferred. We need to come back here in case the
3120 FUNCTION_ARG macro computes items needed for the rest of the compilation
3121 (such as changing which registers are fixed or caller-saved). But suppress
3122 writing any insns or setting DECL_RTL of anything in this case. */
3123
3124 void
3125 assign_parms (fndecl, second_time)
3126 tree fndecl;
3127 int second_time;
3128 {
3129 register tree parm;
3130 register rtx entry_parm = 0;
3131 register rtx stack_parm = 0;
3132 CUMULATIVE_ARGS args_so_far;
3133 enum machine_mode promoted_mode, passed_mode;
3134 enum machine_mode nominal_mode, promoted_nominal_mode;
3135 int unsignedp;
3136 /* Total space needed so far for args on the stack,
3137 given as a constant and a tree-expression. */
3138 struct args_size stack_args_size;
3139 tree fntype = TREE_TYPE (fndecl);
3140 tree fnargs = DECL_ARGUMENTS (fndecl);
3141 /* This is used for the arg pointer when referring to stack args. */
3142 rtx internal_arg_pointer;
3143 /* This is a dummy PARM_DECL that we used for the function result if
3144 the function returns a structure. */
3145 tree function_result_decl = 0;
3146 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3147 int varargs_setup = 0;
3148 rtx conversion_insns = 0;
3149 /* FUNCTION_ARG may look at this variable. Since this is not
3150 expanding a call it will always be zero in this function. */
3151 int current_call_is_indirect = 0;
3152
3153 /* Nonzero if the last arg is named `__builtin_va_alist',
3154 which is used on some machines for old-fashioned non-ANSI varargs.h;
3155 this should be stuck onto the stack as if it had arrived there. */
3156 int hide_last_arg
3157 = (current_function_varargs
3158 && fnargs
3159 && (parm = tree_last (fnargs)) != 0
3160 && DECL_NAME (parm)
3161 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3162 "__builtin_va_alist")));
3163
3164 /* Nonzero if function takes extra anonymous args.
3165 This means the last named arg must be on the stack
3166 right before the anonymous ones. */
3167 int stdarg
3168 = (TYPE_ARG_TYPES (fntype) != 0
3169 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3170 != void_type_node));
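/* For example, the prototype `int f (int a, ...)' leaves TYPE_ARG_TYPES
   ending in `int' rather than void_type_node, so STDARG above is
   nonzero; an old-fashioned varargs.h definition instead takes a final
   parameter named `__builtin_va_alist', which sets HIDE_LAST_ARG.  */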
3171
3172 /* If the reg that the virtual arg pointer will be translated into is
3173 not a fixed reg or is the stack pointer, make a copy of the virtual
3174 arg pointer, and address parms via the copy. The frame pointer is
3175 considered fixed even though it is not marked as such.
3176
3177 The second time through, simply use ap to avoid generating rtx. */
3178
3179 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3180 || ! (fixed_regs[ARG_POINTER_REGNUM]
3181 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3182 && ! second_time)
3183 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3184 else
3185 internal_arg_pointer = virtual_incoming_args_rtx;
3186 current_function_internal_arg_pointer = internal_arg_pointer;
3187
3188 stack_args_size.constant = 0;
3189 stack_args_size.var = 0;
3190
3191 /* If struct value address is treated as the first argument, make it so. */
3192 if (aggregate_value_p (DECL_RESULT (fndecl))
3193 && ! current_function_returns_pcc_struct
3194 && struct_value_incoming_rtx == 0)
3195 {
3196 tree type = build_pointer_type (fntype);
3197
3198 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3199
3200 DECL_ARG_TYPE (function_result_decl) = type;
3201 TREE_CHAIN (function_result_decl) = fnargs;
3202 fnargs = function_result_decl;
3203 }
3204
3205 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3206 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3207
3208 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3209 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3210 #else
3211 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
3212 #endif
3213
3214 /* We haven't yet found an argument that we must push and pretend the
3215 caller did. */
3216 current_function_pretend_args_size = 0;
3217
3218 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3219 {
3220 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3221 struct args_size stack_offset;
3222 struct args_size arg_size;
3223 int passed_pointer = 0;
3224 int did_conversion = 0;
3225 tree passed_type = DECL_ARG_TYPE (parm);
3226 tree nominal_type = TREE_TYPE (parm);
3227
3228 /* Set LAST_NAMED if this is the last named arg before some
3229 anonymous args. We treat it as if it were anonymous too. */
3230 int last_named = ((TREE_CHAIN (parm) == 0
3231 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3232 && (stdarg || current_function_varargs));
3233
3234 if (TREE_TYPE (parm) == error_mark_node
3235 /* This can happen after weird syntax errors
3236 or if an enum type is defined among the parms. */
3237 || TREE_CODE (parm) != PARM_DECL
3238 || passed_type == NULL)
3239 {
3240 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3241 const0_rtx);
3242 TREE_USED (parm) = 1;
3243 continue;
3244 }
3245
3246 /* For a varargs.h function, save info about regs and stack space
3247 used by the individual args, not including the va_alist arg. */
3248 if (hide_last_arg && last_named)
3249 current_function_args_info = args_so_far;
3250
3251 /* Find mode of arg as it is passed, and mode of arg
3252 as it should be during execution of this function. */
3253 passed_mode = TYPE_MODE (passed_type);
3254 nominal_mode = TYPE_MODE (nominal_type);
3255
3256 /* If the parm's mode is VOID, its value doesn't matter,
3257 so avoid the usual things like emit_move_insn that could crash. */
3258 if (nominal_mode == VOIDmode)
3259 {
3260 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3261 continue;
3262 }
3263
3264 /* If the parm is to be passed as a transparent union, use the
3265 type of the first field for the tests below. We have already
3266 verified that the modes are the same. */
3267 if (DECL_TRANSPARENT_UNION (parm)
3268 || TYPE_TRANSPARENT_UNION (passed_type))
3269 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3270
3271 /* See if this arg was passed by invisible reference. It is if
3272 it is an object whose size depends on the contents of the
3273 object itself or if the machine requires these objects be passed
3274 that way. */
3275
3276 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3277 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3278 || TREE_ADDRESSABLE (passed_type)
3279 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3280 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3281 passed_type, ! last_named)
3282 #endif
3283 )
3284 {
3285 passed_type = nominal_type = build_pointer_type (passed_type);
3286 passed_pointer = 1;
3287 passed_mode = nominal_mode = Pmode;
3288 }
3289
3290 promoted_mode = passed_mode;
3291
3292 #ifdef PROMOTE_FUNCTION_ARGS
3293 /* Compute the mode to which the arg is actually extended. */
3294 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3295 #endif
3296
3297 /* Let machine desc say which reg (if any) the parm arrives in.
3298 0 means it arrives on the stack. */
3299 #ifdef FUNCTION_INCOMING_ARG
3300 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3301 passed_type, ! last_named);
3302 #else
3303 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3304 passed_type, ! last_named);
3305 #endif
3306
3307 if (entry_parm == 0)
3308 promoted_mode = passed_mode;
3309
3310 #ifdef SETUP_INCOMING_VARARGS
3311 /* If this is the last named parameter, do any required setup for
3312 varargs or stdargs. We need to know about the case of this being an
3313 addressable type, in which case we skip the registers it
3314 would have arrived in.
3315
3316 For stdargs, LAST_NAMED will be set for two parameters, the one that
3317 is actually the last named, and the dummy parameter. We only
3318 want to do this action once.
3319
3320 Also, indicate when RTL generation is to be suppressed. */
3321 if (last_named && !varargs_setup)
3322 {
3323 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3324 current_function_pretend_args_size,
3325 second_time);
3326 varargs_setup = 1;
3327 }
3328 #endif
3329
3330 /* Determine parm's home in the stack,
3331 in case it arrives in the stack or we should pretend it did.
3332
3333 Compute the stack position and rtx where the argument arrives
3334 and its size.
3335
3336 There is one complexity here: If this was a parameter that would
3337 have been passed in registers, but wasn't only because it is
3338 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3339 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3340 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3341 0 as it was the previous time. */
3342
3343 locate_and_pad_parm (promoted_mode, passed_type,
3344 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3345 1,
3346 #else
3347 #ifdef FUNCTION_INCOMING_ARG
3348 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3349 passed_type,
3350 (! last_named
3351 || varargs_setup)) != 0,
3352 #else
3353 FUNCTION_ARG (args_so_far, promoted_mode,
3354 passed_type,
3355 ! last_named || varargs_setup) != 0,
3356 #endif
3357 #endif
3358 fndecl, &stack_args_size, &stack_offset, &arg_size);
3359
3360 if (! second_time)
3361 {
3362 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3363
3364 if (offset_rtx == const0_rtx)
3365 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3366 else
3367 stack_parm = gen_rtx (MEM, promoted_mode,
3368 gen_rtx (PLUS, Pmode,
3369 internal_arg_pointer, offset_rtx));
3370
3371 /* If this is a memory ref that contains aggregate components,
3372 mark it as such for cse and loop optimize. */
3373 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3374 }
3375
3376 /* If this parameter was passed both in registers and in the stack,
3377 use the copy on the stack. */
3378 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3379 entry_parm = 0;
3380
3381 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3382 /* If this parm was passed part in regs and part in memory,
3383 pretend it arrived entirely in memory
3384 by pushing the register-part onto the stack.
3385
3386 In the special case of a DImode or DFmode that is split,
3387 we could put it together in a pseudoreg directly,
3388 but for now that's not worth bothering with. */
3389
3390 if (entry_parm)
3391 {
3392 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3393 passed_type, ! last_named);
3394
3395 if (nregs > 0)
3396 {
3397 current_function_pretend_args_size
3398 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3399 / (PARM_BOUNDARY / BITS_PER_UNIT)
3400 * (PARM_BOUNDARY / BITS_PER_UNIT));
3401
3402 if (! second_time)
3403 move_block_from_reg (REGNO (entry_parm),
3404 validize_mem (stack_parm), nregs,
3405 int_size_in_bytes (TREE_TYPE (parm)));
3406 entry_parm = stack_parm;
3407 }
3408 }
3409 #endif
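/* Worked example of the rounding above, with assumed values
   UNITS_PER_WORD == 4 and nregs == 3, so the register part occupies
   12 bytes: with PARM_BOUNDARY == 32, ((12 + 3) / 4) * 4 == 12,
   while with PARM_BOUNDARY == 64, ((12 + 7) / 8) * 8 == 16.  */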
3410
3411 /* If we didn't decide this parm came in a register,
3412 by default it came on the stack. */
3413 if (entry_parm == 0)
3414 entry_parm = stack_parm;
3415
3416 /* Record permanently how this parm was passed. */
3417 if (! second_time)
3418 DECL_INCOMING_RTL (parm) = entry_parm;
3419
3420 /* If there is actually space on the stack for this parm,
3421 count it in stack_args_size; otherwise set stack_parm to 0
3422 to indicate there is no preallocated stack slot for the parm. */
3423
3424 if (entry_parm == stack_parm
3425 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3426 /* On some machines, even if a parm value arrives in a register
3427 there is still an (uninitialized) stack slot allocated for it.
3428
3429 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3430 whether this parameter already has a stack slot allocated,
3431 because an arg block exists only if current_function_args_size
3432 is larger than some threshold, and we haven't calculated that
3433 yet. So, for now, we just assume that stack slots never exist
3434 in this case. */
3435 || REG_PARM_STACK_SPACE (fndecl) > 0
3436 #endif
3437 )
3438 {
3439 stack_args_size.constant += arg_size.constant;
3440 if (arg_size.var)
3441 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3442 }
3443 else
3444 /* No stack slot was pushed for this parm. */
3445 stack_parm = 0;
3446
3447 /* Update info on where next arg arrives in registers. */
3448
3449 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3450 passed_type, ! last_named);
3451
3452 /* If this is our second time through, we are done with this parm. */
3453 if (second_time)
3454 continue;
3455
3456 /* If we can't trust the parm stack slot to be aligned enough
3457 for its ultimate type, don't use that slot after entry.
3458 We'll make another stack slot, if we need one. */
3459 {
3460 int thisparm_boundary
3461 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3462
3463 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3464 stack_parm = 0;
3465 }
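/* Illustrative numbers: if FUNCTION_ARG_BOUNDARY only guarantees
   32-bit alignment for a DFmode parm, but GET_MODE_ALIGNMENT (DFmode)
   is 64 on this target, the incoming slot may be under-aligned, so it
   is abandoned here.  */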
3466
3467 /* If parm was passed in memory, and we need to convert it on entry,
3468 don't store it back in that same slot. */
3469 if (entry_parm != 0
3470 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3471 stack_parm = 0;
3472
3473 #if 0
3474 /* Now adjust STACK_PARM to the mode and precise location
3475 where this parameter should live during execution,
3476 if we discover that it must live in the stack during execution.
3477 To make debuggers happier on big-endian machines, we store
3478 the value in the last bytes of the space available. */
3479
3480 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3481 && stack_parm != 0)
3482 {
3483 rtx offset_rtx;
3484
3485 if (BYTES_BIG_ENDIAN
3486 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3487 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3488 - GET_MODE_SIZE (nominal_mode));
3489
3490 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3491 if (offset_rtx == const0_rtx)
3492 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3493 else
3494 stack_parm = gen_rtx (MEM, nominal_mode,
3495 gen_rtx (PLUS, Pmode,
3496 internal_arg_pointer, offset_rtx));
3497
3498 /* If this is a memory ref that contains aggregate components,
3499 mark it as such for cse and loop optimize. */
3500 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3501 }
3502 #endif /* 0 */
3503
3504 #ifdef STACK_REGS
3505 /* We need this "use" info, because the gcc-register->stack-register
3506 converter in reg-stack.c needs to know which registers are active
3507 at the start of the function call. The actual parameter loading
3508 instructions may no longer be available by then, since they might
3509 have been optimized away. */
3510
3511 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3512 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3513 #endif
3514
3515 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3516 in the mode in which it arrives.
3517 STACK_PARM is an RTX for a stack slot where the parameter can live
3518 during the function (in case we want to put it there).
3519 STACK_PARM is 0 if no stack slot was pushed for it.
3520
3521 Now output code if necessary to convert ENTRY_PARM to
3522 the type in which this function declares it,
3523 and store that result in an appropriate place,
3524 which may be a pseudo reg, may be STACK_PARM,
3525 or may be a local stack slot if STACK_PARM is 0.
3526
3527 Set DECL_RTL to that place. */
3528
3529 if (nominal_mode == BLKmode)
3530 {
3531 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3532 if (GET_CODE (entry_parm) == REG)
3533 {
3534 int size_stored
3535 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3536 UNITS_PER_WORD);
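/* For instance, a 10-byte struct on a machine with 4-byte words gets
   CEIL_ROUND (10, 4) == 12 bytes, i.e. three full words.  */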
3537
3538 /* Note that we will be storing an integral number of words.
3539 So we have to be careful to ensure that we allocate an
3540 integral number of words. We do this below in the
3541 assign_stack_local if space was not allocated in the argument
3542 list. If it was, this will not work if PARM_BOUNDARY is not
3543 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3544 if it becomes a problem. */
3545
3546 if (stack_parm == 0)
3547 {
3548 stack_parm
3549 = assign_stack_local (GET_MODE (entry_parm),
3550 size_stored, 0);
3551
3552 /* If this is a memory ref that contains aggregate
3553 components, mark it as such for cse and loop optimize. */
3554 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3555 }
3556
3557 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3558 abort ();
3559
3560 if (TREE_READONLY (parm))
3561 RTX_UNCHANGING_P (stack_parm) = 1;
3562
3563 move_block_from_reg (REGNO (entry_parm),
3564 validize_mem (stack_parm),
3565 size_stored / UNITS_PER_WORD,
3566 int_size_in_bytes (TREE_TYPE (parm)));
3567 }
3568 DECL_RTL (parm) = stack_parm;
3569 }
3570 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3571 && ! DECL_INLINE (fndecl))
3572 /* layout_decl may set this. */
3573 || TREE_ADDRESSABLE (parm)
3574 || TREE_SIDE_EFFECTS (parm)
3575 /* If -ffloat-store specified, don't put explicit
3576 float variables into registers. */
3577 || (flag_float_store
3578 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3579 /* Always assign pseudo to structure return or item passed
3580 by invisible reference. */
3581 || passed_pointer || parm == function_result_decl)
3582 {
3583 /* Store the parm in a pseudoregister during the function, but we
3584 may need to do it in a wider mode. */
3585
3586 register rtx parmreg;
3587 int regno, regnoi, regnor;
3588
3589 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3590
3591 promoted_nominal_mode
3592 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3593
3594 parmreg = gen_reg_rtx (promoted_nominal_mode);
3595 REG_USERVAR_P (parmreg) = 1;
3596
3597 /* If this was an item that we received a pointer to, set DECL_RTL
3598 appropriately. */
3599 if (passed_pointer)
3600 {
3601 DECL_RTL (parm)
3602 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3603 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3604 }
3605 else
3606 DECL_RTL (parm) = parmreg;
3607
3608 /* Copy the value into the register. */
3609 if (nominal_mode != passed_mode
3610 || promoted_nominal_mode != promoted_mode)
3611 {
3612 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3613 mode, by the caller. We now have to convert it to
3614 NOMINAL_MODE, if different. However, PARMREG may be in
3615 a different mode than NOMINAL_MODE if it is being stored
3616 promoted.
3617
3618 If ENTRY_PARM is a hard register, it might be in a register
3619 not valid for operating in its mode (e.g., an odd-numbered
3620 register for a DFmode). In that case, moves are the only
3621 thing valid, so we can't do a convert from there. This
3622 occurs when the calling sequence allows such misaligned
3623 usages.
3624
3625 In addition, the conversion may involve a call, which could
3626 clobber parameters which haven't been copied to pseudo
3627 registers yet. Therefore, we must first copy the parm to
3628 a pseudo reg here, and save the conversion until after all
3629 parameters have been moved. */
3630
3631 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3632
3633 emit_move_insn (tempreg, validize_mem (entry_parm));
3634 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3635
3636 push_to_sequence (conversion_insns);
3637 expand_assignment (parm,
3638 make_tree (nominal_type, tempreg), 0, 0);
3639 conversion_insns = get_insns ();
3640 did_conversion = 1;
3641 end_sequence ();
3642 }
3643 else
3644 emit_move_insn (parmreg, validize_mem (entry_parm));
3645
3646 /* If we were passed a pointer but the actual value
3647 can safely live in a register, put it in one. */
3648 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3649 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3650 && ! DECL_INLINE (fndecl))
3651 /* layout_decl may set this. */
3652 || TREE_ADDRESSABLE (parm)
3653 || TREE_SIDE_EFFECTS (parm)
3654 /* If -ffloat-store specified, don't put explicit
3655 float variables into registers. */
3656 || (flag_float_store
3657 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3658 {
3659 /* We can't use nominal_mode, because it will have been set to
3660 Pmode above. We must use the actual mode of the parm. */
3661 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3662 REG_USERVAR_P (parmreg) = 1;
3663 emit_move_insn (parmreg, DECL_RTL (parm));
3664 DECL_RTL (parm) = parmreg;
3665 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3666 now the parm. */
3667 stack_parm = 0;
3668 }
3669 #ifdef FUNCTION_ARG_CALLEE_COPIES
3670 /* If we are passed an arg by reference and it is our responsibility
3671 to make a copy, do it now.
3672 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3673 original argument, so we must recreate them in the call to
3674 FUNCTION_ARG_CALLEE_COPIES. */
3675 /* ??? Later, add code to handle the case where the argument isn't
3676 modified; then the copy can be omitted. */
3677
3678 else if (passed_pointer
3679 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3680 TYPE_MODE (DECL_ARG_TYPE (parm)),
3681 DECL_ARG_TYPE (parm),
3682 ! last_named))
3683 {
3684 rtx copy;
3685 tree type = DECL_ARG_TYPE (parm);
3686
3687 /* This sequence may involve a library call perhaps clobbering
3688 registers that haven't been copied to pseudos yet. */
3689
3690 push_to_sequence (conversion_insns);
3691
3692 if (TYPE_SIZE (type) == 0
3693 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3694 /* This is a variable-sized object. */
3695 copy = gen_rtx (MEM, BLKmode,
3696 allocate_dynamic_stack_space
3697 (expr_size (parm), NULL_RTX,
3698 TYPE_ALIGN (type)));
3699 else
3700 copy = assign_stack_temp (TYPE_MODE (type),
3701 int_size_in_bytes (type), 1);
3702 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3703
3704 store_expr (parm, copy, 0);
3705 emit_move_insn (parmreg, XEXP (copy, 0));
3706 conversion_insns = get_insns ();
3707 did_conversion = 1;
3708 end_sequence ();
3709 }
3710 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3711
3712 /* In any case, record the parm's desired stack location
3713 in case we later discover it must live in the stack.
3714
3715 If it is a COMPLEX value, store the stack location for both
3716 halves. */
3717
3718 if (GET_CODE (parmreg) == CONCAT)
3719 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3720 else
3721 regno = REGNO (parmreg);
3722
3723 if (regno >= nparmregs)
3724 {
3725 rtx *new;
3726 int old_nparmregs = nparmregs;
3727
3728 nparmregs = regno + 5;
3729 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3730 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3731 old_nparmregs * sizeof (rtx));
3732 bzero ((char *) (new + old_nparmregs),
3733 (nparmregs - old_nparmregs) * sizeof (rtx));
3734 parm_reg_stack_loc = new;
3735 }
3736
3737 if (GET_CODE (parmreg) == CONCAT)
3738 {
3739 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3740
3741 regnor = REGNO (gen_realpart (submode, parmreg));
3742 regnoi = REGNO (gen_imagpart (submode, parmreg));
3743
3744 if (stack_parm != 0)
3745 {
3746 parm_reg_stack_loc[regnor]
3747 = gen_realpart (submode, stack_parm);
3748 parm_reg_stack_loc[regnoi]
3749 = gen_imagpart (submode, stack_parm);
3750 }
3751 else
3752 {
3753 parm_reg_stack_loc[regnor] = 0;
3754 parm_reg_stack_loc[regnoi] = 0;
3755 }
3756 }
3757 else
3758 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3759
3760 /* Mark the register as eliminable if we did no conversion
3761 and it was copied from memory at a fixed offset,
3762 and the arg pointer was not copied to a pseudo-reg.
3763 If the arg pointer is a pseudo reg or the offset formed
3764 an invalid address, such memory-equivalences
3765 as we make here would screw up life analysis for it. */
3766 if (nominal_mode == passed_mode
3767 && ! did_conversion
3768 && GET_CODE (entry_parm) == MEM
3769 && entry_parm == stack_parm
3770 && stack_offset.var == 0
3771 && reg_mentioned_p (virtual_incoming_args_rtx,
3772 XEXP (entry_parm, 0)))
3773 {
3774 rtx linsn = get_last_insn ();
3775
3776 /* Mark complex types separately. */
3777 if (GET_CODE (parmreg) == CONCAT)
3778 {
3779 REG_NOTES (linsn)
3780 = gen_rtx (EXPR_LIST, REG_EQUIV,
3781 parm_reg_stack_loc[regnoi], REG_NOTES (linsn));
3782
3783 /* Now search backward for where we set the real part. */
3784 for (; linsn != 0
3785 && ! reg_referenced_p (parm_reg_stack_loc[regnor],
3786 PATTERN (linsn));
3787 linsn = prev_nonnote_insn (linsn))
3788 ;
3789
3790 REG_NOTES (linsn)
3791 = gen_rtx (EXPR_LIST, REG_EQUIV,
3792 parm_reg_stack_loc[regnor], REG_NOTES (linsn));
3793 }
3794 else
3795 REG_NOTES (linsn)
3796 = gen_rtx (EXPR_LIST, REG_EQUIV,
3797 entry_parm, REG_NOTES (linsn));
3798 }
3799
3800 /* For a pointer data type, suggest a pointer register. */
3801 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3802 mark_reg_pointer (parmreg);
3803 }
3804 else
3805 {
3806 /* Value must be stored in the stack slot STACK_PARM
3807 during function execution. */
3808
3809 if (promoted_mode != nominal_mode)
3810 {
3811 /* Conversion is required. */
3812 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3813
3814 emit_move_insn (tempreg, validize_mem (entry_parm));
3815
3816 push_to_sequence (conversion_insns);
3817 entry_parm = convert_to_mode (nominal_mode, tempreg,
3818 TREE_UNSIGNED (TREE_TYPE (parm)));
3819 conversion_insns = get_insns ();
3820 did_conversion = 1;
3821 end_sequence ();
3822 }
3823
3824 if (entry_parm != stack_parm)
3825 {
3826 if (stack_parm == 0)
3827 {
3828 stack_parm
3829 = assign_stack_local (GET_MODE (entry_parm),
3830 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3831 /* If this is a memory ref that contains aggregate components,
3832 mark it as such for cse and loop optimize. */
3833 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3834 }
3835
3836 if (promoted_mode != nominal_mode)
3837 {
3838 push_to_sequence (conversion_insns);
3839 emit_move_insn (validize_mem (stack_parm),
3840 validize_mem (entry_parm));
3841 conversion_insns = get_insns ();
3842 end_sequence ();
3843 }
3844 else
3845 emit_move_insn (validize_mem (stack_parm),
3846 validize_mem (entry_parm));
3847 }
3848
3849 DECL_RTL (parm) = stack_parm;
3850 }
3851
3852 /* If this "parameter" was the place where we are receiving the
3853 function's incoming structure pointer, set up the result. */
3854 if (parm == function_result_decl)
3855 {
3856 tree result = DECL_RESULT (fndecl);
3857 tree restype = TREE_TYPE (result);
3858
3859 DECL_RTL (result)
3860 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
3861
3862 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
3863 }
3864
3865 if (TREE_THIS_VOLATILE (parm))
3866 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3867 if (TREE_READONLY (parm))
3868 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3869 }
3870
3871 /* Output all parameter conversion instructions (possibly including calls)
3872 now that all parameters have been copied out of hard registers. */
3873 emit_insns (conversion_insns);
3874
3875 max_parm_reg = max_reg_num ();
3876 last_parm_insn = get_last_insn ();
3877
3878 current_function_args_size = stack_args_size.constant;
3879
3880 /* Adjust function incoming argument size for alignment and
3881 minimum length. */
3882
3883 #ifdef REG_PARM_STACK_SPACE
3884 #ifndef MAYBE_REG_PARM_STACK_SPACE
3885 current_function_args_size = MAX (current_function_args_size,
3886 REG_PARM_STACK_SPACE (fndecl));
3887 #endif
3888 #endif
3889
3890 #ifdef STACK_BOUNDARY
3891 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3892
3893 current_function_args_size
3894 = ((current_function_args_size + STACK_BYTES - 1)
3895 / STACK_BYTES) * STACK_BYTES;
3896 #endif
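/* E.g., with STACK_BOUNDARY == 64, STACK_BYTES is 8 and an args size
   of 20 bytes is rounded up to ((20 + 7) / 8) * 8 == 24.  */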
3897
3898 #ifdef ARGS_GROW_DOWNWARD
3899 current_function_arg_offset_rtx
3900 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3901 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3902 size_int (-stack_args_size.constant)),
3903 NULL_RTX, VOIDmode, 0));
3904 #else
3905 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3906 #endif
3907
3908 /* See how many bytes, if any, of its args a function should try to pop
3909 on return. */
3910
3911 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3912 current_function_args_size);
3913
3914 /* For a stdarg.h function, save info about
3915 regs and stack space used by the named args. */
3916
3917 if (!hide_last_arg)
3918 current_function_args_info = args_so_far;
3919
3920 /* Set the rtx used for the function return value. Put this in its
3921 own variable so any optimizers that need this information don't have
3922 to include tree.h. Do this here so it gets done when an inlined
3923 function gets output. */
3924
3925 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3926 }
3927 \f
3928 /* Indicate whether REGNO is an incoming argument to the current function
3929 that was promoted to a wider mode. If so, return the RTX for the
3930 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3931 that REGNO is promoted from and whether the promotion was signed or
3932 unsigned. */
3933
3934 #ifdef PROMOTE_FUNCTION_ARGS
3935
3936 rtx
3937 promoted_input_arg (regno, pmode, punsignedp)
3938 int regno;
3939 enum machine_mode *pmode;
3940 int *punsignedp;
3941 {
3942 tree arg;
3943
3944 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3945 arg = TREE_CHAIN (arg))
3946 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
3947 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3948 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3949 {
3950 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3951 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
3952
3953 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3954 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3955 && mode != DECL_MODE (arg))
3956 {
3957 *pmode = DECL_MODE (arg);
3958 *punsignedp = unsignedp;
3959 return DECL_INCOMING_RTL (arg);
3960 }
3961 }
3962
3963 return 0;
3964 }
3965
3966 #endif
3967 \f
3968 /* Compute the size and offset from the start of the stacked arguments for a
3969 parm passed in mode PASSED_MODE and with type TYPE.
3970
3971 INITIAL_OFFSET_PTR points to the current offset into the stacked
3972 arguments.
3973
3974 The starting offset and size for this parm are returned in *OFFSET_PTR
3975 and *ARG_SIZE_PTR, respectively.
3976
3977 IN_REGS is non-zero if the argument will be passed in registers. It will
3978 never be set if REG_PARM_STACK_SPACE is not defined.
3979
3980 FNDECL is the function in which the argument was defined.
3981
3982 There are two types of rounding that are done. The first, controlled by
3983 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3984 list to be aligned to the specified boundary (in bits). This rounding
3985 affects the initial and starting offsets, but not the argument size.
3986
3987 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3988 optionally rounds the size of the parm to PARM_BOUNDARY. The
3989 initial offset is not affected by this rounding, while the size always
3990 is and the starting offset may be. */
3991
3992 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
3993 initial_offset_ptr is positive because locate_and_pad_parm's
3994 callers pass in the total size of args so far as
3995 initial_offset_ptr. arg_size_ptr is always positive. */
3996
3997 void
3998 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3999 initial_offset_ptr, offset_ptr, arg_size_ptr)
4000 enum machine_mode passed_mode;
4001 tree type;
4002 int in_regs;
4003 tree fndecl;
4004 struct args_size *initial_offset_ptr;
4005 struct args_size *offset_ptr;
4006 struct args_size *arg_size_ptr;
4007 {
4008 tree sizetree
4009 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4010 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4011 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4012 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4013 int reg_parm_stack_space = 0;
4014
4015 #ifdef REG_PARM_STACK_SPACE
4016 /* If we have found a stack parm before we reach the end of the
4017 area reserved for registers, skip that area. */
4018 if (! in_regs)
4019 {
4020 #ifdef MAYBE_REG_PARM_STACK_SPACE
4021 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4022 #else
4023 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4024 #endif
4025 if (reg_parm_stack_space > 0)
4026 {
4027 if (initial_offset_ptr->var)
4028 {
4029 initial_offset_ptr->var
4030 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4031 size_int (reg_parm_stack_space));
4032 initial_offset_ptr->constant = 0;
4033 }
4034 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4035 initial_offset_ptr->constant = reg_parm_stack_space;
4036 }
4037 }
4038 #endif /* REG_PARM_STACK_SPACE */
4039
4040 arg_size_ptr->var = 0;
4041 arg_size_ptr->constant = 0;
4042
4043 #ifdef ARGS_GROW_DOWNWARD
4044 if (initial_offset_ptr->var)
4045 {
4046 offset_ptr->constant = 0;
4047 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4048 initial_offset_ptr->var);
4049 }
4050 else
4051 {
4052 offset_ptr->constant = - initial_offset_ptr->constant;
4053 offset_ptr->var = 0;
4054 }
4055 if (where_pad != none
4056 && (TREE_CODE (sizetree) != INTEGER_CST
4057 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4058 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4059 SUB_PARM_SIZE (*offset_ptr, sizetree);
4060 if (where_pad != downward)
4061 pad_to_arg_alignment (offset_ptr, boundary);
4062 if (initial_offset_ptr->var)
4063 {
4064 arg_size_ptr->var = size_binop (MINUS_EXPR,
4065 size_binop (MINUS_EXPR,
4066 integer_zero_node,
4067 initial_offset_ptr->var),
4068 offset_ptr->var);
4069 }
4070 else
4071 {
4072 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4073 offset_ptr->constant);
4074 }
4075 #else /* !ARGS_GROW_DOWNWARD */
4076 pad_to_arg_alignment (initial_offset_ptr, boundary);
4077 *offset_ptr = *initial_offset_ptr;
4078
4079 #ifdef PUSH_ROUNDING
4080 if (passed_mode != BLKmode)
4081 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4082 #endif
4083
4084 /* Pad_below needs the pre-rounded size to know how much to pad below,
4085 so this must be done before rounding up. */
4086 if (where_pad == downward
4087 /* However, BLKmode args passed in regs have their padding done elsewhere.
4088 The stack slot must be able to hold the entire register. */
4089 && !(in_regs && passed_mode == BLKmode))
4090 pad_below (offset_ptr, passed_mode, sizetree);
4091
4092 if (where_pad != none
4093 && (TREE_CODE (sizetree) != INTEGER_CST
4094 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4095 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4096
4097 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4098 #endif /* ARGS_GROW_DOWNWARD */
4099 }
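/* A worked example of the !ARGS_GROW_DOWNWARD path, with assumed
   values boundary == 64 bits, PARM_BOUNDARY == 32, an initial offset
   of 4, and a 6-byte BLKmode parm padded upward:

       pad_to_arg_alignment:  CEIL_ROUND (4, 8) == 8, so *offset_ptr == 8
       round_up (6, 4) == 8 bytes, so *arg_size_ptr == 8

   The parm therefore starts at byte 8, and the next parm's initial
   offset will be 8 + 8 == 16.  */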
4100
4101 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4102 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4103
4104 static void
4105 pad_to_arg_alignment (offset_ptr, boundary)
4106 struct args_size *offset_ptr;
4107 int boundary;
4108 {
4109 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4110
4111 if (boundary > BITS_PER_UNIT)
4112 {
4113 if (offset_ptr->var)
4114 {
4115 offset_ptr->var =
4116 #ifdef ARGS_GROW_DOWNWARD
4117 round_down
4118 #else
4119 round_up
4120 #endif
4121 (ARGS_SIZE_TREE (*offset_ptr),
4122 boundary / BITS_PER_UNIT);
4123 offset_ptr->constant = 0; /*?*/
4124 }
4125 else
4126 offset_ptr->constant =
4127 #ifdef ARGS_GROW_DOWNWARD
4128 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4129 #else
4130 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4131 #endif
4132 }
4133 }
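/* E.g., boundary == 32 gives boundary_in_bytes == 4; a constant
   offset of 9 becomes CEIL_ROUND (9, 4) == 12, while in the
   ARGS_GROW_DOWNWARD case an offset of -9 becomes
   FLOOR_ROUND (-9, 4) == -12 (illustrative values).  */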
4134
4135 static void
4136 pad_below (offset_ptr, passed_mode, sizetree)
4137 struct args_size *offset_ptr;
4138 enum machine_mode passed_mode;
4139 tree sizetree;
4140 {
4141 if (passed_mode != BLKmode)
4142 {
4143 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4144 offset_ptr->constant
4145 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4146 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4147 - GET_MODE_SIZE (passed_mode));
4148 }
4149 else
4150 {
4151 if (TREE_CODE (sizetree) != INTEGER_CST
4152 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4153 {
4154 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4155 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4156 /* Add it in. */
4157 ADD_PARM_SIZE (*offset_ptr, s2);
4158 SUB_PARM_SIZE (*offset_ptr, sizetree);
4159 }
4160 }
4161 }
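/* E.g., an SImode parm (4 bytes) with PARM_BOUNDARY == 64 has its
   offset advanced by 8 - 4 == 4 bytes, so the value ends up in the
   last 4 bytes of its 8-byte slot (illustrative values).  */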
4162
4163 static tree
4164 round_down (value, divisor)
4165 tree value;
4166 int divisor;
4167 {
4168 return size_binop (MULT_EXPR,
4169 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4170 size_int (divisor));
4171 }
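/* E.g., round_down (size_int (10), 4) yields 8: FLOOR_DIV_EXPR gives
   2, and 2 * 4 == 8.  */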
4172 \f
4173 /* Walk the tree of blocks describing the binding levels within a function
4174 and warn about uninitialized variables.
4175 This is done after calling flow_analysis and before global_alloc
4176 clobbers the pseudo-regs to hard regs. */
4177
4178 void
4179 uninitialized_vars_warning (block)
4180 tree block;
4181 {
4182 register tree decl, sub;
4183 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4184 {
4185 if (TREE_CODE (decl) == VAR_DECL
4186 /* These warnings are unreliable for aggregates
4187 because assigning the fields one by one can fail to convince
4188 flow.c that the entire aggregate was initialized.
4189 Unions are troublesome because members may be shorter. */
4190 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4191 && DECL_RTL (decl) != 0
4192 && GET_CODE (DECL_RTL (decl)) == REG
4193 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4194 warning_with_decl (decl,
4195 "`%s' might be used uninitialized in this function");
4196 if (TREE_CODE (decl) == VAR_DECL
4197 && DECL_RTL (decl) != 0
4198 && GET_CODE (DECL_RTL (decl)) == REG
4199 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4200 warning_with_decl (decl,
4201 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4202 }
4203 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4204 uninitialized_vars_warning (sub);
4205 }
4206
4207 /* Do the appropriate part of uninitialized_vars_warning
4208 but for arguments instead of local variables. */
4209
4210 void
4211 setjmp_args_warning (block)
4212 tree block;
4213 {
4214 register tree decl;
4215 for (decl = DECL_ARGUMENTS (current_function_decl);
4216 decl; decl = TREE_CHAIN (decl))
4217 if (DECL_RTL (decl) != 0
4218 && GET_CODE (DECL_RTL (decl)) == REG
4219 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4220 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4221 }
4222
4223 /* If this function calls setjmp, put all vars into the stack
4224 unless they were declared `register'. */
4225
4226 void
4227 setjmp_protect (block)
4228 tree block;
4229 {
4230 register tree decl, sub;
4231 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4232 if ((TREE_CODE (decl) == VAR_DECL
4233 || TREE_CODE (decl) == PARM_DECL)
4234 && DECL_RTL (decl) != 0
4235 && GET_CODE (DECL_RTL (decl)) == REG
4236 /* If this variable came from an inline function, it must be
4237 that its life doesn't overlap the setjmp. If there was a
4238 setjmp in the function, it would already be in memory. We
4239 must exclude such variables because their DECL_RTL might be
4240 set to strange things such as virtual_stack_vars_rtx. */
4241 && ! DECL_FROM_INLINE (decl)
4242 && (
4243 #ifdef NON_SAVING_SETJMP
4244 /* If longjmp doesn't restore the registers,
4245 don't put anything in them. */
4246 NON_SAVING_SETJMP
4247 ||
4248 #endif
4249 ! DECL_REGISTER (decl)))
4250 put_var_into_stack (decl);
4251 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4252 setjmp_protect (sub);
4253 }
4254 \f
4255 /* Like the previous function, but for args instead of local variables. */
4256
4257 void
4258 setjmp_protect_args ()
4259 {
4260 register tree decl, sub;
4261 for (decl = DECL_ARGUMENTS (current_function_decl);
4262 decl; decl = TREE_CHAIN (decl))
4263 if ((TREE_CODE (decl) == VAR_DECL
4264 || TREE_CODE (decl) == PARM_DECL)
4265 && DECL_RTL (decl) != 0
4266 && GET_CODE (DECL_RTL (decl)) == REG
4267 && (
4268 /* If longjmp doesn't restore the registers,
4269 don't put anything in them. */
4270 #ifdef NON_SAVING_SETJMP
4271 NON_SAVING_SETJMP
4272 ||
4273 #endif
4274 ! DECL_REGISTER (decl)))
4275 put_var_into_stack (decl);
4276 }
4277 \f
4278 /* Return the context-pointer register corresponding to DECL,
4279 or 0 if it does not need one. */
4280
4281 rtx
4282 lookup_static_chain (decl)
4283 tree decl;
4284 {
4285 tree context = decl_function_context (decl);
4286 tree link;
4287
4288 if (context == 0)
4289 return 0;
4290
4291 /* We treat inline_function_decl as an alias for the current function
4292 because that is the inline function whose vars, types, etc.
4293 are being merged into the current function.
4294 See expand_inline_function. */
4295 if (context == current_function_decl || context == inline_function_decl)
4296 return virtual_stack_vars_rtx;
4297
4298 for (link = context_display; link; link = TREE_CHAIN (link))
4299 if (TREE_PURPOSE (link) == context)
4300 return RTL_EXPR_RTL (TREE_VALUE (link));
4301
4302 abort ();
4303 }
4304 \f
4305 /* Convert a stack slot address ADDR for variable VAR
4306 (from a containing function)
4307 into an address valid in this function (using a static chain). */
4308
4309 rtx
4310 fix_lexical_addr (addr, var)
4311 rtx addr;
4312 tree var;
4313 {
4314 rtx basereg;
4315 int displacement;
4316 tree context = decl_function_context (var);
4317 struct function *fp;
4318 rtx base = 0;
4319
4320 /* If this is the present function, we need not do anything. */
4321 if (context == current_function_decl || context == inline_function_decl)
4322 return addr;
4323
4324 for (fp = outer_function_chain; fp; fp = fp->next)
4325 if (fp->decl == context)
4326 break;
4327
4328 if (fp == 0)
4329 abort ();
4330
4331 /* Decode given address as base reg plus displacement. */
4332 if (GET_CODE (addr) == REG)
4333 basereg = addr, displacement = 0;
4334 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4335 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4336 else
4337 abort ();
4338
4339 /* We accept vars reached via the containing function's
4340 incoming arg pointer and via its stack variables pointer. */
4341 if (basereg == fp->internal_arg_pointer)
4342 {
4343 /* If reached via arg pointer, get the arg pointer value
4344 out of that function's stack frame.
4345
4346 There are two cases: If a separate ap is needed, allocate a
4347 slot in the outer function for it and dereference it that way.
4348 This is correct even if the real ap is actually a pseudo.
4349 Otherwise, just adjust the offset from the frame pointer to
4350 compensate. */
4351
4352 #ifdef NEED_SEPARATE_AP
4353 rtx addr;
4354
4355 if (fp->arg_pointer_save_area == 0)
4356 fp->arg_pointer_save_area
4357 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4358
4359 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4360 addr = memory_address (Pmode, addr);
4361
4362 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4363 #else
4364 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4365 base = lookup_static_chain (var);
4366 #endif
4367 }
4368
4369 else if (basereg == virtual_stack_vars_rtx)
4370 {
4371 /* This is the same code as lookup_static_chain, duplicated here to
4372 avoid an extra call to decl_function_context. */
4373 tree link;
4374
4375 for (link = context_display; link; link = TREE_CHAIN (link))
4376 if (TREE_PURPOSE (link) == context)
4377 {
4378 base = RTL_EXPR_RTL (TREE_VALUE (link));
4379 break;
4380 }
4381 }
4382
4383 if (base == 0)
4384 abort ();
4385
4386 /* Use same offset, relative to appropriate static chain or argument
4387 pointer. */
4388 return plus_constant (base, displacement);
4389 }
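/* For instance (illustrative): if VAR lives in the containing function
   at (plus virtual_stack_vars_rtx -12), BASE becomes the static chain
   pointer fetched from the display, and the address used here is
   (plus BASE -12) -- the same displacement, relative to the outer
   frame.  */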
4390 \f
4391 /* Return the address of the trampoline for entering nested fn FUNCTION.
4392 If necessary, allocate a trampoline (in the stack frame)
4393 and emit rtl to initialize its contents (at entry to this function). */
4394
4395 rtx
4396 trampoline_address (function)
4397 tree function;
4398 {
4399 tree link;
4400 tree rtlexp;
4401 rtx tramp;
4402 struct function *fp;
4403 tree fn_context;
4404
4405 /* Find an existing trampoline and return it. */
4406 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4407 if (TREE_PURPOSE (link) == function)
4408 return
4409 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4410
4411 for (fp = outer_function_chain; fp; fp = fp->next)
4412 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4413 if (TREE_PURPOSE (link) == function)
4414 {
4415 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4416 function);
4417 return round_trampoline_addr (tramp);
4418 }
4419
4420 /* None exists; we must make one. */
4421
4422 /* Find the `struct function' for the function containing FUNCTION. */
4423 fp = 0;
4424 fn_context = decl_function_context (function);
4425 if (fn_context != current_function_decl)
4426 for (fp = outer_function_chain; fp; fp = fp->next)
4427 if (fp->decl == fn_context)
4428 break;
4429
4430 /* Allocate run-time space for this trampoline
4431 (usually in the defining function's stack frame). */
4432 #ifdef ALLOCATE_TRAMPOLINE
4433 tramp = ALLOCATE_TRAMPOLINE (fp);
4434 #else
4435 /* If rounding is needed, allocate extra space
4436 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4437 #ifdef TRAMPOLINE_ALIGNMENT
4438 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
4439 #else
4440 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4441 #endif
4442 if (fp != 0)
4443 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4444 else
4445 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4446 #endif
4447
4448 /* Record the trampoline for reuse and note it for later initialization
4449 by expand_function_end. */
4450 if (fp != 0)
4451 {
4452 push_obstacks (fp->function_maybepermanent_obstack,
4453 fp->function_maybepermanent_obstack);
4454 rtlexp = make_node (RTL_EXPR);
4455 RTL_EXPR_RTL (rtlexp) = tramp;
4456 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4457 pop_obstacks ();
4458 }
4459 else
4460 {
4461 /* Make the RTL_EXPR node temporary, not momentary, so that the
4462 trampoline_list doesn't become garbage. */
4463 int momentary = suspend_momentary ();
4464 rtlexp = make_node (RTL_EXPR);
4465 resume_momentary (momentary);
4466
4467 RTL_EXPR_RTL (rtlexp) = tramp;
4468 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4469 }
4470
4471 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4472 return round_trampoline_addr (tramp);
4473 }
4474
4475 /* Given a trampoline address,
4476 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
4477
4478 static rtx
4479 round_trampoline_addr (tramp)
4480 rtx tramp;
4481 {
4482 #ifdef TRAMPOLINE_ALIGNMENT
4483 /* Round address up to desired boundary. */
4484 rtx temp = gen_reg_rtx (Pmode);
4485 temp = expand_binop (Pmode, add_optab, tramp,
4486 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
4487 temp, 0, OPTAB_LIB_WIDEN);
4488 tramp = expand_binop (Pmode, and_optab, temp,
4489 GEN_INT (- TRAMPOLINE_ALIGNMENT),
4490 temp, 0, OPTAB_LIB_WIDEN);
4491 #endif
4492 return tramp;
4493 }
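/* E.g., with TRAMPOLINE_ALIGNMENT == 8, an address of 0x1003 becomes
   (0x1003 + 7) & -8 == 0x1008.  */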
4494 \f
4495 /* The functions identify_blocks and reorder_blocks provide a way to
4496 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4497 duplicate portions of the RTL code. Call identify_blocks before
4498 changing the RTL, and call reorder_blocks after. */
4499
4500 /* Put all this function's BLOCK nodes into a vector, and return it.
4501 Also store in each NOTE for the beginning or end of a block
4502 the index of that block in the vector.
4503 The arguments are TOP_BLOCK, the top-level block of the function,
4504 and INSNS, the insn chain of the function. */
4505
4506 tree *
4507 identify_blocks (top_block, insns)
4508 tree top_block;
4509 rtx insns;
4510 {
4511 int n_blocks;
4512 tree *block_vector;
4513 int *block_stack;
4514 int depth = 0;
4515 int next_block_number = 0;
4516 int current_block_number = 0;
4517 rtx insn;
4518
4519 if (top_block == 0)
4520 return 0;
4521
4522 n_blocks = all_blocks (top_block, 0);
4523 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4524 block_stack = (int *) alloca (n_blocks * sizeof (int));
4525
4526 all_blocks (top_block, block_vector);
4527
4528 for (insn = insns; insn; insn = NEXT_INSN (insn))
4529 if (GET_CODE (insn) == NOTE)
4530 {
4531 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4532 {
4533 block_stack[depth++] = current_block_number;
4534 current_block_number = next_block_number;
4535 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4536 }
4537 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4538 {
4539 current_block_number = block_stack[--depth];
4540 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4541 }
4542 }
4543
4544 return block_vector;
4545 }
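/* Thus each BLOCK_BEG note is stamped with the vector index of the
   block it opens (indices are handed out in the order the BEG notes
   appear, with TOP_BLOCK itself at index 0), and each BLOCK_END note
   with the index of the block being returned to.  */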
4546
4547 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4548 and a revised instruction chain, rebuild the tree structure
4549 of BLOCK nodes to correspond to the new order of RTL.
4550 The new block tree is inserted below TOP_BLOCK.
4551 Returns the current top-level block. */
4552
4553 tree
4554 reorder_blocks (block_vector, top_block, insns)
4555 tree *block_vector;
4556 tree top_block;
4557 rtx insns;
4558 {
4559 tree current_block = top_block;
4560 rtx insn;
4561
4562 if (block_vector == 0)
4563 return top_block;
4564
4565 /* Prune the old tree away, so that it doesn't get in the way. */
4566 BLOCK_SUBBLOCKS (current_block) = 0;
4567
4568 for (insn = insns; insn; insn = NEXT_INSN (insn))
4569 if (GET_CODE (insn) == NOTE)
4570 {
4571 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4572 {
4573 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4574 /* If we have seen this block before, copy it. */
4575 if (TREE_ASM_WRITTEN (block))
4576 block = copy_node (block);
4577 BLOCK_SUBBLOCKS (block) = 0;
4578 TREE_ASM_WRITTEN (block) = 1;
4579 BLOCK_SUPERCONTEXT (block) = current_block;
4580 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4581 BLOCK_SUBBLOCKS (current_block) = block;
4582 current_block = block;
4583 NOTE_SOURCE_FILE (insn) = 0;
4584 }
4585 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4586 {
4587 BLOCK_SUBBLOCKS (current_block)
4588 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4589 current_block = BLOCK_SUPERCONTEXT (current_block);
4590 NOTE_SOURCE_FILE (insn) = 0;
4591 }
4592 }
4593
4594 return current_block;
4595 }
4596
4597 /* Reverse the order of elements in the chain T of blocks,
4598 and return the new head of the chain (old last element). */
4599
4600 static tree
4601 blocks_nreverse (t)
4602 tree t;
4603 {
4604 register tree prev = 0, decl, next;
4605 for (decl = t; decl; decl = next)
4606 {
4607 next = BLOCK_CHAIN (decl);
4608 BLOCK_CHAIN (decl) = prev;
4609 prev = decl;
4610 }
4611 return prev;
4612 }
4613
4614 /* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
4615 Also clear TREE_ASM_WRITTEN in all blocks. */
4616
4617 static int
4618 all_blocks (block, vector)
4619 tree block;
4620 tree *vector;
4621 {
4622 int n_blocks = 1;
4623 tree subblocks;
4624
4625 TREE_ASM_WRITTEN (block) = 0;
4626 /* Record this block. */
4627 if (vector)
4628 vector[0] = block;
4629
4630 /* Record the subblocks, and their subblocks. */
4631 for (subblocks = BLOCK_SUBBLOCKS (block);
4632 subblocks; subblocks = BLOCK_CHAIN (subblocks))
4633 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
4634
4635 return n_blocks;
4636 }
4637 \f
4638 /* Build bytecode call descriptor for function SUBR. */
4639
4640 rtx
4641 bc_build_calldesc (subr)
4642 tree subr;
4643 {
4644 tree calldesc = 0, arg;
4645 int nargs = 0;
4646
4647 /* Build the argument description vector in reverse order. */
4648 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4649 nargs = 0;
4650
4651 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4652 {
4653 ++nargs;
4654
4655 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4656 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4657 }
4658
4659 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4660
4661 /* Prepend the function's return type. */
4662 calldesc = tree_cons ((tree) 0,
4663 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4664 calldesc);
4665
4666 calldesc = tree_cons ((tree) 0,
4667 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4668 calldesc);
4669
4670 /* Prepend the arg count. */
4671 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4672
4673 /* Output the call description vector and get its address. */
4674 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4675 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4676 build_index_type (build_int_2 (nargs * 2, 0)));
4677
4678 return output_constant_def (calldesc);
4679 }
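/* For instance, for `int f (int a, double b)' on a hypothetical
   machine with 4-byte ints and 8-byte doubles, the vector would be

       { 2,            nargs
         T_int, 4,     return type code and size
         T_int, 4,     a
         T_double, 8 } b

   where T_int and T_double stand for whatever bc_runtime_type_code
   returns for those types.  */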
4680
4681
4682 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4683 and initialize static variables for generating RTL for the statements
4684 of the function. */
4685
4686 void
4687 init_function_start (subr, filename, line)
4688 tree subr;
4689 char *filename;
4690 int line;
4691 {
4692 char *junk;
4693
4694 if (output_bytecode)
4695 {
4696 this_function_decl = subr;
4697 this_function_calldesc = bc_build_calldesc (subr);
4698 local_vars_size = 0;
4699 stack_depth = 0;
4700 max_stack_depth = 0;
4701 stmt_expr_depth = 0;
4702 return;
4703 }
4704
4705 init_stmt_for_function ();
4706
4707 cse_not_expected = ! optimize;
4708
4709 /* Caller save not needed yet. */
4710 caller_save_needed = 0;
4711
4712 /* No stack slots have been made yet. */
4713 stack_slot_list = 0;
4714
4715 /* There is no stack slot for handling nonlocal gotos. */
4716 nonlocal_goto_handler_slot = 0;
4717 nonlocal_goto_stack_level = 0;
4718
4719 /* No labels have been declared for nonlocal use. */
4720 nonlocal_labels = 0;
4721
4722 /* No function calls so far in this function. */
4723 function_call_count = 0;
4724
4725 /* No parm regs have been allocated.
4726 (This is important for output_inline_function.) */
4727 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4728
4729 /* Initialize the RTL mechanism. */
4730 init_emit ();
4731
4732 /* Initialize the queue of pending postincrements and postdecrements,
4733 and some other info in expr.c. */
4734 init_expr ();
4735
4736 /* We haven't done register allocation yet. */
4737 reg_renumber = 0;
4738
4739 init_const_rtx_hash_table ();
4740
4741 current_function_name = (*decl_printable_name) (subr, &junk);
4742
4743 /* Nonzero if this is a nested function that uses a static chain. */
4744
4745 current_function_needs_context
4746 = (decl_function_context (current_function_decl) != 0);
4747
4748 /* Set if a call to setjmp is seen. */
4749 current_function_calls_setjmp = 0;
4750
4751 /* Set if a call to longjmp is seen. */
4752 current_function_calls_longjmp = 0;
4753
4754 current_function_calls_alloca = 0;
4755 current_function_has_nonlocal_label = 0;
4756 current_function_has_nonlocal_goto = 0;
4757 current_function_contains_functions = 0;
4758
4759 current_function_returns_pcc_struct = 0;
4760 current_function_returns_struct = 0;
4761 current_function_epilogue_delay_list = 0;
4762 current_function_uses_const_pool = 0;
4763 current_function_uses_pic_offset_table = 0;
4764
4765 /* We have not yet needed to make a label to jump to for tail-recursion. */
4766 tail_recursion_label = 0;
4767
4768 /* We haven't had a need to make a save area for ap yet. */
4769
4770 arg_pointer_save_area = 0;
4771
4772 /* No stack slots allocated yet. */
4773 frame_offset = 0;
4774
4775 /* No SAVE_EXPRs in this function yet. */
4776 save_expr_regs = 0;
4777
4778 /* No RTL_EXPRs in this function yet. */
4779 rtl_expr_chain = 0;
4780
4781 /* We have not allocated any temporaries yet. */
4782 temp_slots = 0;
4783 temp_slot_level = 0;
4784 target_temp_slot_level = 0;
4785
4786 /* Within the function body, compute a type's size as soon as it is laid out. */
4787 immediate_size_expand++;
4788
4789 /* We haven't made any trampolines for this function yet. */
4790 trampoline_list = 0;
4791
4792 init_pending_stack_adjust ();
4793 inhibit_defer_pop = 0;
4794
4795 current_function_outgoing_args_size = 0;
4796
4797 /* Initialize the insn lengths. */
4798 init_insn_lengths ();
4799
4800 /* Prevent ever trying to delete the first instruction of a function.
4801 Also tell final how to output a linenum before the function prologue. */
4802 emit_line_note (filename, line);
4803
4804 /* Make sure first insn is a note even if we don't want linenums.
4805 This makes sure the first insn will never be deleted.
4806 Also, final expects a note to appear there. */
4807 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4808
4809 /* Set flags used by final.c. */
4810 if (aggregate_value_p (DECL_RESULT (subr)))
4811 {
4812 #ifdef PCC_STATIC_STRUCT_RETURN
4813 current_function_returns_pcc_struct = 1;
4814 #endif
4815 current_function_returns_struct = 1;
4816 }
4817
4818 /* Warn if this value is an aggregate type,
4819 regardless of which calling convention we are using for it. */
4820 if (warn_aggregate_return
4821 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4822 warning ("function returns an aggregate");
4823
4824 current_function_returns_pointer
4825 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4826
4827 /* Indicate that we need to distinguish between the return value of the
4828 present function and the return value of a function being called. */
4829 rtx_equal_function_value_matters = 1;
4830
4831 /* Indicate that we have not instantiated virtual registers yet. */
4832 virtuals_instantiated = 0;
4833
4834 /* Indicate we have no need of a frame pointer yet. */
4835 frame_pointer_needed = 0;
4836
4837 /* By default assume not varargs. */
4838 current_function_varargs = 0;
4839 }
4840
4841 /* Indicate that the current function uses extra args
4842 not explicitly mentioned in the argument list in any fashion. */
4843
4844 void
4845 mark_varargs ()
4846 {
4847 current_function_varargs = 1;
4848 }
4849
4850 /* Expand a call to __main at the beginning of a possible main function. */
4851
4852 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4853 #undef HAS_INIT_SECTION
4854 #define HAS_INIT_SECTION
4855 #endif
4856
4857 void
4858 expand_main_function ()
4859 {
4860 if (!output_bytecode)
4861 {
4862 /* The zero below avoids a possible parse error. */
4863 0;
4864 #if !defined (HAS_INIT_SECTION)
4865 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
4866 VOIDmode, 0);
4867 #endif /* not HAS_INIT_SECTION */
4868 }
4869 }
4870 \f
4871 extern struct obstack permanent_obstack;
4872
4873 /* Expand the start of a bytecode function. See the comment at
4874 expand_function_start below for details. */
4875
4876 void
4877 bc_expand_function_start (subr, parms_have_cleanups)
4878 tree subr;
4879 int parms_have_cleanups;
4880 {
4881 char label[20], *name;
4882 static int nlab;
4883 tree thisarg;
4884 int argsz;
4885
4886 if (TREE_PUBLIC (subr))
4887 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
4888
4889 #ifdef DEBUG_PRINT_CODE
4890 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
4891 #endif
4892
4893 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
4894 {
4895 if (DECL_RTL (thisarg))
4896 abort (); /* Should be NULL here I think. */
4897 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
4898 {
4899 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
4900 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
4901 }
4902 else
4903 {
4904 /* Variable-sized objects are pointers to their storage. */
4905 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
4906 argsz += POINTER_SIZE;
4907 }
4908 }
4909
4910 bc_begin_function (bc_xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
4911
4912 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
4913
4914 ++nlab;
4915 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
4916 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
4917 this_function_bytecode =
4918 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
4919 }
4920
4921
4922 /* Expand the end of a bytecode function. See details in the comment
4923 for expand_function_end, below. */
4924
4925 void
4926 bc_expand_function_end ()
4927 {
4928 char *ptrconsts;
4929
4930 expand_null_return ();
4931
4932 /* Emit any fixup code. This must be done before the call to
4933 BC_END_FUNCTION (), since that will cause the bytecode
4934 segment to be finished off and closed. */
4935
4936 expand_fixups (NULL_RTX);
4937
4938 ptrconsts = bc_end_function ();
4939
4940 bc_align_const (2 /* INT_ALIGN */);
4941
4942 /* If this changes, also make sure to change bc-interp.h! */
4943
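/* The record emitted below is, in order: the callinfo label itself,
   max_stack_depth, local_vars_size, a labelref to the bytecode, a
   labelref to the pointer constants, and a labelref to the call
   descriptor.  */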
4944 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
4945 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
4946 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
4947 bc_emit_const_labelref (this_function_bytecode, 0);
4948 bc_emit_const_labelref (ptrconsts, 0);
4949 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
4950 }
4951
4952
4953 /* Start the RTL for a new function, and set variables used for
4954 emitting RTL.
4955 SUBR is the FUNCTION_DECL node.
4956 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4957 the function's parameters, which must be run at any return statement. */
4958
4959 void
4960 expand_function_start (subr, parms_have_cleanups)
4961 tree subr;
4962 int parms_have_cleanups;
4963 {
4964 register int i;
4965 tree tem;
4966 rtx last_ptr;
4967
4968 if (output_bytecode)
4969 {
4970 bc_expand_function_start (subr, parms_have_cleanups);
4971 return;
4972 }
4973
4974 /* Make sure volatile mem refs aren't considered
4975 valid operands of arithmetic insns. */
4976 init_recog_no_volatile ();
4977
4978 /* If function gets a static chain arg, store it in the stack frame.
4979 Do this first, so it gets the first stack slot offset. */
4980 if (current_function_needs_context)
4981 {
4982 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4983
4984 #ifdef SMALL_REGISTER_CLASSES
4985 /* Delay copying static chain if it is not a register to avoid
4986 conflicts with regs used for parameters. */
4987 if (GET_CODE (static_chain_incoming_rtx) == REG)
4988 #endif
4989 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4990 }
4991
4992 /* If the parameters of this function need cleaning up, get a label
4993 for the beginning of the code which executes those cleanups. This must
4994 be done before doing anything with return_label. */
4995 if (parms_have_cleanups)
4996 cleanup_label = gen_label_rtx ();
4997 else
4998 cleanup_label = 0;
4999
5000 /* Make the label for return statements to jump to, if this machine
5001 does not have a one-instruction return and uses an epilogue,
5002 or if it returns a structure, or if it has parm cleanups. */
5003 #ifdef HAVE_return
5004 if (cleanup_label == 0 && HAVE_return
5005 && ! current_function_returns_pcc_struct
5006 && ! (current_function_returns_struct && ! optimize))
5007 return_label = 0;
5008 else
5009 return_label = gen_label_rtx ();
5010 #else
5011 return_label = gen_label_rtx ();
5012 #endif
5013
5014 /* Initialize rtx used to return the value. */
5015 /* Do this before assign_parms so that we copy the struct value address
5016 before any library calls that assign parms might generate. */
5017
5018 /* Decide whether to return the value in memory or in a register. */
5019 if (aggregate_value_p (DECL_RESULT (subr)))
5020 {
5021 /* Returning something that won't go in a register. */
5022 register rtx value_address = 0;
5023
5024 #ifdef PCC_STATIC_STRUCT_RETURN
5025 if (current_function_returns_pcc_struct)
5026 {
5027 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5028 value_address = assemble_static_space (size);
5029 }
5030 else
5031 #endif
5032 {
5033 /* Expect to be passed the address of a place to store the value.
5034 If it is passed as an argument, assign_parms will take care of
5035 it. */
5036 if (struct_value_incoming_rtx)
5037 {
5038 value_address = gen_reg_rtx (Pmode);
5039 emit_move_insn (value_address, struct_value_incoming_rtx);
5040 }
5041 }
5042 if (value_address)
5043 {
5044 DECL_RTL (DECL_RESULT (subr))
5045 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5046 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5047 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5048 }
5049 }
5050 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5051 /* If return mode is void, this decl rtl should not be used. */
5052 DECL_RTL (DECL_RESULT (subr)) = 0;
5053 else if (parms_have_cleanups)
5054 {
5055 /* If the function will end with cleanup code for parms,
5056 compute the return value into a pseudo reg,
5057 which we will copy into the true return register
5058 after the cleanups are done. */
5059
5060 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5061
5062 #ifdef PROMOTE_FUNCTION_RETURN
5063 tree type = TREE_TYPE (DECL_RESULT (subr));
5064 int unsignedp = TREE_UNSIGNED (type);
5065
5066 mode = promote_mode (type, mode, &unsignedp, 1);
5067 #endif
5068
5069 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5070 }
5071 else
5072 /* Scalar, returned in a register. */
5073 {
5074 #ifdef FUNCTION_OUTGOING_VALUE
5075 DECL_RTL (DECL_RESULT (subr))
5076 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5077 #else
5078 DECL_RTL (DECL_RESULT (subr))
5079 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5080 #endif
5081
5082 /* Mark this reg as the function's return value. */
5083 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5084 {
5085 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5086 /* Needed because we may need to move this to memory
5087 in case it's a named return value whose address is taken. */
5088 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5089 }
5090 }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns. */

  assign_parms (subr, 0);

#ifdef SMALL_REGISTER_CLASSES
  /* Copy the static chain now if it wasn't a register. The delay is to
     avoid conflicts with the parameter passing registers. */

  if (current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
#endif

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate. */
  /* Indicate the beginning of the function body,
     as opposed to parm setup. */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here. */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }
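  /* Illustrative sketch (commentary, not from this source): under stupid
     register allocation (when not optimizing), use_variable roughly
     emits a USE of each parameter's pseudo, so the register is treated
     as live for the whole function rather than being reused after its
     last explicit reference. */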

  /* Fetch static chain values for containing functions. */
  tem = decl_function_context (current_function_decl);
  /* If not doing stupid register allocation, copy the static chain
     pointer into a pseudo. If we have small register classes, copy the
     value from memory if static_chain_incoming_rtx is a REG. If we do
     stupid register allocation, we use the stack address generated above. */
  if (tem && ! obey_regdecls)
    {
#ifdef SMALL_REGISTER_CLASSES
      /* If the static chain originally came in a register, put it back
         there, then move it out in the next insn. The reason for
         this peculiar code is to satisfy function integration. */
      if (GET_CODE (static_chain_incoming_rtx) == REG)
        emit_move_insn (static_chain_incoming_rtx, last_ptr);
#endif

      last_ptr = copy_to_reg (static_chain_incoming_rtx);
    }

  context_display = 0;
  while (tem)
    {
      tree rtlexp = make_node (RTL_EXPR);

      RTL_EXPR_RTL (rtlexp) = last_ptr;
      context_display = tree_cons (tem, rtlexp, context_display);
      tem = decl_function_context (tem);
      if (tem == 0)
        break;
      /* Chain through stack frames, assuming the pointer to the next
         lexical frame is found at the place we always store it. */
#ifdef FRAME_GROWS_DOWNWARD
      last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
      last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
                                       memory_address (Pmode, last_ptr)));

      /* If we are not optimizing, ensure that we know that this
         piece of context is live over the entire function. */
      if (! optimize)
        save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
                                  save_expr_regs);
    }
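  /* Illustrative sketch (hypothetical example, not from this source):
     given GNU C nested functions

         f () { int x; g () { h () { ... use x ... } } }

     compiling h runs this loop twice: last_ptr first addresses g's
     frame (h's incoming static chain), then the chain word fetched
     from g's frame, which addresses f's frame. context_display thus
     pairs each containing function with an rtx for its frame, for
     later use when a lexical reference like x must be addressed. */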

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one. Ensure we have a NOTE here
     since some things (like trampolines) get placed before this. */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments. */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
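  /* Illustrative sketch (hypothetical example, not from this source):
     this matters for parameters whose types have variable size, e.g.
     the GNU C extension

         void f (int n, int a[n][n]) { ... }

     where the size expression for a's type must be computed at
     function entry. */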

  /* Make sure there is a line number after the function entry setup code. */
  force_next_line_note ();
}
\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

  static rtx initial_trampoline;

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers. */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it. */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function. */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx seq;

      /* First make sure this compilation has a template for
         initializing trampolines. */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }

      /* Generate insns to initialize the trampoline. */
      start_sequence ();
      tramp = change_address (initial_trampoline, BLKmode,
                              round_trampoline_addr (XEXP (tramp, 0)));
      emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
                       FUNCTION_BOUNDARY / BITS_PER_UNIT);
      INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
                             XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one). */
      emit_insns_before (seq, tail_recursion_reentry);
    }
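  /* Illustrative sketch (commentary, not from this source): a trampoline
     is a small stub, living in this function's frame, whose machine
     code loads the static chain register with CONTEXT and jumps to
     FUNCTION. Taking the address of a nested function yields the
     trampoline's address, so an indirect call through that pointer
     still finds the containing frame. The insns built here copy the
     target's code template into the stack slot and patch in the two
     addresses. */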

  /* Warn about unused parms if extra warnings were specified. */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them. */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors. */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside a function body, we can't compute a type's actual size
     until the next function's body starts. */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here. */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function. */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value. */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a linenumber for the end of the function.
     SDB depends on this. */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected. This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning. */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this. */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer. */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
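  /* Illustrative sketch (hypothetical example, not from this source):
     for, say, f (n) { char *p = alloca (n); ... }, the save is emitted
     just after parm_birth_insn, so the entry-time stack pointer is
     recorded before any alloca call can move it, and the restore
     emitted here puts it back before the epilogue runs. */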

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register. */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }
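  /* Illustrative sketch (commentary, not from this source): the trailing
     USE keeps the hard return register live from the copy to the end of
     the function, so flow analysis and scheduling cannot treat the
     value as dead before the return. */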

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set. */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function. */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue. */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL. */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose. */
  expand_fixups (get_insns ());
}
\f
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn). */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
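/* Illustrative sketch (commentary, not from this source): the result is
   a zero-terminated vector, e.g. { uid1, uid2, 0 }. Insn UIDs are
   nonzero, so 0 is safe as the sentinel and callers can scan with
   "for (i = 0; vec[i]; i++)". */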

/* Determine how many INSN_UIDs in VEC are part of INSN. */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
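/* Illustrative sketch (commentary, not from this source): after delayed
   branch scheduling an insn may really be a SEQUENCE holding a branch
   plus its delay-slot insns, so contains returns a count rather than a
   boolean; reposition_prologue_and_epilogue_notes below subtracts that
   count from the number of insns it still expects to find. */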

/* Generate the prologue and epilogue RTL if the machine supports it. Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins. Update the basic block information when possible. */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
         prologue insns and a NOTE_INSN_PROLOGUE_END. */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block. Ignore them
         if they form a basic block unto themselves. */
      if (basic_block_head && n_basic_blocks
          && GET_CODE (basic_block_head[0]) != CODE_LABEL)
        basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns. */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue. */
      if (! (prev && GET_CODE (prev) == BARRIER))
        {
          rtx tail, seq, tem;
          rtx first_use = 0;
          rtx last_use = 0;

          /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
             epilogue insns, the USE insns at the end of a function,
             the jump insn that returns, and then a BARRIER. */

          /* Move the USE insns at the end of a function onto a list. */
          while (prev
                 && GET_CODE (prev) == INSN
                 && GET_CODE (PATTERN (prev)) == USE)
            {
              tem = prev;
              prev = prev_nonnote_insn (prev);

              NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
              PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
              if (first_use)
                {
                  NEXT_INSN (tem) = first_use;
                  PREV_INSN (first_use) = tem;
                }
              first_use = tem;
              if (!last_use)
                last_use = tem;
            }

          emit_barrier_after (insn);

          seq = gen_epilogue ();
          tail = emit_jump_insn_after (seq, insn);

          /* Insert the USE insns immediately before the return insn, which
             must be the first instruction before the final barrier. */
          if (first_use)
            {
              tem = prev_nonnote_insn (get_last_insn ());
              NEXT_INSN (PREV_INSN (tem)) = first_use;
              PREV_INSN (first_use) = PREV_INSN (tem);
              PREV_INSN (tem) = last_use;
              NEXT_INSN (last_use) = tem;
            }
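          /* Illustrative sketch (commentary, not from this source): the
             insn chain is a doubly linked list, so these splices edit
             the NEXT_INSN/PREV_INSN fields directly. Once the note
             below is added, the function ends with
             NOTE_INSN_EPILOGUE_BEG, the epilogue insns with the saved
             USEs just ahead of the return jump, and the BARRIER. */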

          emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

          /* Include the new epilogue insns in the last block. Ignore
             them if they form a basic block unto themselves. */
          if (basic_block_end && n_basic_blocks
              && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
            basic_block_end[n_basic_blocks - 1] = tail;

          /* Retain a map of the epilogue insns. */
          epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          return;
        }
    }
#endif
  epilogue = 0;
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling. */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes. */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run. */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn. */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note)) != 0;)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, insn);
                }
            }
        }
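      /* Illustrative sketch (commentary, not from this source): the
         countdown works because reorg may have packed prologue insns
         into delay-slot SEQUENCEs; contains () counts every prologue
         uid inside each insn scanned, so len reaches zero exactly at
         the insn holding the last prologue insn. The epilogue scan
         below mirrors this from the other end. */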

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run. */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn. */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note)) != 0;)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, PREV_INSN (insn));
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}