1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
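
/* An illustrative sketch, not part of this file: roughly how a front end
   drives the routines described above for a single function.  The exact
   argument lists here are assumptions based on the description above.  */
#if 0
static void
compile_one_function (fndecl)
     tree fndecl;
{
  expand_function_start (fndecl, 0);	/* before the body is parsed */
  /* ... expand the function body; this may call assign_stack_local to
     give locals stack slots, or put_var_into_stack when a variable in
     a pseudo-register turns out to need a stack home ... */
  expand_function_end (input_filename, lineno, 0);	/* after the body */
}
#endif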
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59
60 #ifndef TRAMPOLINE_ALIGNMENT
61 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
62 #endif
63
64 /* Some systems use __main in a way incompatible with its use in gcc; in these
65 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
66 give the same symbol without quotes for an alternative entry point. You
67 must define both, or neither. */
68 #ifndef NAME__MAIN
69 #define NAME__MAIN "__main"
70 #define SYMBOL__MAIN __main
71 #endif
72
73 /* Round a value down to the nearest multiple of the required
74 alignment. Avoid using division in case the value is
75 negative. Assume the alignment is a power of two. */
76 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
77
78 /* Similar, but round up to the next integer that is a multiple of
79 the alignment. */
80 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
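
/* For instance (an illustration, not from this file): with ALIGN == 8,
   FLOOR_ROUND (-9, 8) == (-9 & ~7) == -16 and
   CEIL_ROUND (-9, 8) == ((-9 + 7) & ~7) == -8,
   the nearest multiples of 8 below and above -9.  Truncating division
   would give -9 / 8 == -1, hence -8, which is wrong for FLOOR_ROUND.  */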
81
82 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
83 during rtl generation. If they are different register numbers, this is
84 always true. It may also be true if
85 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
86 generation. See fix_lexical_addr for details. */
87
88 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
89 #define NEED_SEPARATE_AP
90 #endif
91
92 /* Number of bytes of args popped by function being compiled on its return.
93 Zero if no bytes are to be popped.
94 May affect compilation of return insn or of function epilogue. */
95
96 int current_function_pops_args;
97
98 /* Nonzero if function being compiled needs to be given an address
99 where the value should be stored. */
100
101 int current_function_returns_struct;
102
103 /* Nonzero if function being compiled needs to
104 return the address of where it has put a structure value. */
105
106 int current_function_returns_pcc_struct;
107
108 /* Nonzero if function being compiled needs to be passed a static chain. */
109
110 int current_function_needs_context;
111
112 /* Nonzero if function being compiled can call setjmp. */
113
114 int current_function_calls_setjmp;
115
116 /* Nonzero if function being compiled can call longjmp. */
117
118 int current_function_calls_longjmp;
119
120 /* Nonzero if function being compiled receives nonlocal gotos
121 from nested functions. */
122
123 int current_function_has_nonlocal_label;
124
125 /* Nonzero if function being compiled has nonlocal gotos to parent
126 function. */
127
128 int current_function_has_nonlocal_goto;
129
130 /* Nonzero if this function has a computed goto.
131
132 It is computed during find_basic_blocks or during stupid life
133 analysis. */
134
135 int current_function_has_computed_jump;
136
137 /* Nonzero if function being compiled contains nested functions. */
138
139 int current_function_contains_functions;
140
141 /* Nonzero if function being compiled doesn't modify the stack pointer
142 (ignoring the prologue and epilogue). This is only valid after
143 life_analysis has run. */
144
145 int current_function_sp_is_unchanging;
146
147 /* Nonzero if the current function is a thunk (a lightweight function that
148 just adjusts one of its arguments and forwards to another function), so
149 we should try to cut corners where we can. */
150 int current_function_is_thunk;
151
152 /* Nonzero if function being compiled can call alloca,
153 either as a subroutine or builtin. */
154
155 int current_function_calls_alloca;
156
157 /* Nonzero if the current function returns a pointer type */
158
159 int current_function_returns_pointer;
160
161 /* If some insns can be deferred to the delay slots of the epilogue, the
162 delay list for them is recorded here. */
163
164 rtx current_function_epilogue_delay_list;
165
166 /* If function's args have a fixed size, this is that size, in bytes.
167 Otherwise, it is -1.
168 May affect compilation of return insn or of function epilogue. */
169
170 int current_function_args_size;
171
172 /* # bytes the prologue should push and pretend that the caller pushed them.
173 The prologue must do this, but only if parms can be passed in registers. */
174
175 int current_function_pretend_args_size;
176
177 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
178 defined, the needed space is pushed by the prologue. */
179
180 int current_function_outgoing_args_size;
181
182 /* This is the offset from the arg pointer to the place where the first
183 anonymous arg can be found, if there is one. */
184
185 rtx current_function_arg_offset_rtx;
186
187 /* Nonzero if current function uses varargs.h or equivalent.
188 Zero for functions that use stdarg.h. */
189
190 int current_function_varargs;
191
192 /* Nonzero if current function uses stdarg.h or equivalent.
193 Zero for functions that use varargs.h. */
194
195 int current_function_stdarg;
196
197 /* Quantities of various kinds of registers
198 used for the current function's args. */
199
200 CUMULATIVE_ARGS current_function_args_info;
201
202 /* Name of function now being compiled. */
203
204 char *current_function_name;
205
206 /* If non-zero, an RTL expression for the location at which the current
207 function returns its result. If the current function returns its
208 result in a register, current_function_return_rtx will always be
209 the hard register containing the result. */
210
211 rtx current_function_return_rtx;
212
213 /* Nonzero if the current function uses the constant pool. */
214
215 int current_function_uses_const_pool;
216
217 /* Nonzero if the current function uses pic_offset_table_rtx. */
218 int current_function_uses_pic_offset_table;
219
220 /* The arg pointer hard register, or the pseudo into which it was copied. */
221 rtx current_function_internal_arg_pointer;
222
223 /* Language-specific reason why the current function cannot be made inline. */
224 char *current_function_cannot_inline;
225
226 /* Nonzero if instrumentation calls for function entry and exit should be
227 generated. */
228 int current_function_instrument_entry_exit;
229
230 /* Nonzero if memory access checking should be enabled in the current function. */
231 int current_function_check_memory_usage;
232
233 /* The FUNCTION_DECL for an inline function currently being expanded. */
234 tree inline_function_decl;
235
236 /* Number of function calls seen so far in current function. */
237
238 int function_call_count;
239
240 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
241 (labels to which there can be nonlocal gotos from nested functions)
242 in this function. */
243
244 tree nonlocal_labels;
245
246 /* RTX for stack slot that holds the current handler for nonlocal gotos.
247 Zero when function does not have nonlocal labels. */
248
249 rtx nonlocal_goto_handler_slot;
250
251 /* RTX for stack slot that holds the stack pointer value to restore
252 for a nonlocal goto.
253 Zero when function does not have nonlocal labels. */
254
255 rtx nonlocal_goto_stack_level;
256
257 /* Label that will go on parm cleanup code, if any.
258 Jumping to this label runs cleanup code for parameters, if
259 such code must be run. Following this code is the logical return label. */
260
261 rtx cleanup_label;
262
263 /* Label that will go on function epilogue.
264 Jumping to this label serves as a "return" instruction
265 on machines which require execution of the epilogue on all returns. */
266
267 rtx return_label;
268
269 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
270 So we can mark them all live at the end of the function, if not optimizing. */
271 rtx save_expr_regs;
272
273 /* List (chain of EXPR_LISTs) of all stack slots in this function.
274 Made for the sake of unshare_all_rtl. */
275 rtx stack_slot_list;
276
277 /* Chain of all RTL_EXPRs that have insns in them. */
278 tree rtl_expr_chain;
279
280 /* Label to jump back to for tail recursion, or 0 if we have
281 not yet needed one for this function. */
282 rtx tail_recursion_label;
283
284 /* Place after which to insert the tail_recursion_label if we need one. */
285 rtx tail_recursion_reentry;
286
287 /* Location at which to save the argument pointer if it will need to be
288 referenced. There are two cases where this is done: if nonlocal gotos
289 exist, or if vars stored at an offset from the argument pointer will be
290 needed by inner routines. */
291
292 rtx arg_pointer_save_area;
293
294 /* Offset to end of allocated area of stack frame.
295 If stack grows down, this is the address of the last stack slot allocated.
296 If stack grows up, this is the address for the next slot. */
297 HOST_WIDE_INT frame_offset;
298
299 /* List (chain of TREE_LISTs) of static chains for containing functions.
300 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
301 in an RTL_EXPR in the TREE_VALUE. */
302 static tree context_display;
303
304 /* List (chain of TREE_LISTs) of trampolines for nested functions.
305 The trampoline sets up the static chain and jumps to the function.
306 We supply the trampoline's address when the function's address is requested.
307
308 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
309 in an RTL_EXPR in the TREE_VALUE. */
310 static tree trampoline_list;
311
312 /* Insn after which register parms and SAVE_EXPRs are born, if not optimizing. */
313 static rtx parm_birth_insn;
314
315 #if 0
316 /* Nonzero if a stack slot has been generated whose address is not
317 actually valid. It means that the generated rtl must all be scanned
318 to detect and correct the invalid addresses where they occur. */
319 static int invalid_stack_slot;
320 #endif
321
322 /* Last insn of those whose job was to put parms into their nominal homes. */
323 static rtx last_parm_insn;
324
325 /* 1 + last pseudo register number possibly used for loading a copy
326 of a parameter of this function. */
327 int max_parm_reg;
328
329 /* Vector indexed by REGNO, containing location on stack in which
330 to put the parm which is nominally in pseudo register REGNO,
331 if we discover that that parm must go in the stack. The highest
332 element in this vector is one less than MAX_PARM_REG, above. */
333 rtx *parm_reg_stack_loc;
334
335 /* Nonzero once virtual register instantiation has been done.
336 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
337 static int virtuals_instantiated;
338
339 /* These variables hold pointers to functions to
340 save and restore machine-specific data,
341 in push_function_context and pop_function_context. */
342 void (*save_machine_status) PROTO((struct function *));
343 void (*restore_machine_status) PROTO((struct function *));
344
345 /* Nonzero if we need to distinguish between the return value of this function
346 and the return value of a function called by this function. This helps
347 integrate.c */
348
349 extern int rtx_equal_function_value_matters;
350 extern tree sequence_rtl_expr;
351 \f
352 /* In order to evaluate some expressions, such as function calls returning
353 structures in memory, we need to temporarily allocate stack locations.
354 We record each allocated temporary in the following structure.
355
356 Associated with each temporary slot is a nesting level. When we pop up
357 one level, all temporaries associated with the previous level are freed.
358 Normally, all temporaries are freed after the execution of the statement
359 in which they were created. However, if we are inside a ({...}) grouping,
360 the result may be in a temporary and hence must be preserved. If the
361 result could be in a temporary, we preserve it if we can determine which
362 one it is in. If we cannot determine which temporary may contain the
363 result, all temporaries are preserved. A temporary is preserved by
364 pretending it was allocated at the previous nesting level.
365
366 Automatic variables are also assigned temporary slots, at the nesting
367 level where they are defined. They are marked as "kept" so that
368 free_temp_slots will not free them. */
369
370 struct temp_slot
371 {
372 /* Points to next temporary slot. */
373 struct temp_slot *next;
374 /* The rtx used to reference the slot. */
375 rtx slot;
376 /* The rtx used to represent the address if not the address of the
377 slot above. May be an EXPR_LIST if multiple addresses exist. */
378 rtx address;
379 /* The size, in units, of the slot. */
380 HOST_WIDE_INT size;
381 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
382 tree rtl_expr;
383 /* Non-zero if this temporary is currently in use. */
384 char in_use;
385 /* Non-zero if this temporary has its address taken. */
386 char addr_taken;
387 /* Nesting level at which this slot is being used. */
388 int level;
389 /* Non-zero if this should survive a call to free_temp_slots. */
390 int keep;
391 /* The offset of the slot from the frame_pointer, including extra space
392 for alignment. This info is for combine_temp_slots. */
393 HOST_WIDE_INT base_offset;
394 /* The size of the slot, including extra space for alignment. This
395 info is for combine_temp_slots. */
396 HOST_WIDE_INT full_size;
397 };
398
399 /* List of all temporaries allocated, both available and in use. */
400
401 struct temp_slot *temp_slots;
402
403 /* Current nesting level for temporaries. */
404
405 int temp_slot_level;
406
407 /* Current nesting level for variables in a block. */
408
409 int var_temp_slot_level;
410
411 /* When temporaries are created by TARGET_EXPRs, they are created at
412 this level of temp_slot_level, so that they can remain allocated
413 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
414 of TARGET_EXPRs. */
415 int target_temp_slot_level;
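
/* An illustrative sketch, not from this file, of how callers are expected
   to pair the level-manipulation routines defined further below around
   the expansion of a single statement.  */
#if 0
  push_temp_slots ();		/* enter a new nesting level */
  /* ... expand one statement; assign_stack_temp allocates at the
     current temp_slot_level ... */
  preserve_temp_slots (result);	/* if RESULT may sit in a temporary */
  free_temp_slots ();		/* release this level's unkept slots */
  pop_temp_slots ();		/* leave the nesting level */
#endif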
416 \f
417 /* This structure is used to record MEMs or pseudos used to replace VAR, any
418 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
419 maintain this list in case two operands of an insn were required to match;
420 in that case we must ensure we use the same replacement. */
421
422 struct fixup_replacement
423 {
424 rtx old;
425 rtx new;
426 struct fixup_replacement *next;
427 };
428
429 /* Forward declarations. */
430
431 static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
432 int, struct function *));
433 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
434 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
435 enum machine_mode, enum machine_mode,
436 int, int, int));
437 static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
438 static struct fixup_replacement
439 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
440 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
441 rtx, int));
442 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
443 struct fixup_replacement **));
444 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
445 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
446 static rtx fixup_stack_1 PROTO((rtx, rtx));
447 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
448 static void instantiate_decls PROTO((tree, int));
449 static void instantiate_decls_1 PROTO((tree, int));
450 static void instantiate_decl PROTO((rtx, int, int));
451 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
452 static void delete_handlers PROTO((void));
453 static void pad_to_arg_alignment PROTO((struct args_size *, int));
454 #ifndef ARGS_GROW_DOWNWARD
455 static void pad_below PROTO((struct args_size *, enum machine_mode,
456 tree));
457 #endif
458 #ifdef ARGS_GROW_DOWNWARD
459 static tree round_down PROTO((tree, int));
460 #endif
461 static rtx round_trampoline_addr PROTO((rtx));
462 static tree blocks_nreverse PROTO((tree));
463 static int all_blocks PROTO((tree, tree *));
464 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
465 static int *record_insns PROTO((rtx));
466 static int contains PROTO((rtx, int *));
467 #endif /* HAVE_prologue || HAVE_epilogue */
468 static void put_addressof_into_stack PROTO((rtx));
469 static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
470 \f
471 /* Pointer to chain of `struct function' for containing functions. */
472 struct function *outer_function_chain;
473
474 /* Given a function decl for a containing function,
475 return the `struct function' for it. */
476
477 struct function *
478 find_function_data (decl)
479 tree decl;
480 {
481 struct function *p;
482
483 for (p = outer_function_chain; p; p = p->next)
484 if (p->decl == decl)
485 return p;
486
487 abort ();
488 }
489
490 /* Save the current context for compilation of a nested function.
491 This is called from language-specific code.
492 The caller is responsible for saving any language-specific status,
493 since this function knows only about language-independent variables. */
494
495 void
496 push_function_context_to (context)
497 tree context;
498 {
499 struct function *p = (struct function *) xmalloc (sizeof (struct function));
500
501 p->next = outer_function_chain;
502 outer_function_chain = p;
503
504 p->name = current_function_name;
505 p->decl = current_function_decl;
506 p->pops_args = current_function_pops_args;
507 p->returns_struct = current_function_returns_struct;
508 p->returns_pcc_struct = current_function_returns_pcc_struct;
509 p->returns_pointer = current_function_returns_pointer;
510 p->needs_context = current_function_needs_context;
511 p->calls_setjmp = current_function_calls_setjmp;
512 p->calls_longjmp = current_function_calls_longjmp;
513 p->calls_alloca = current_function_calls_alloca;
514 p->has_nonlocal_label = current_function_has_nonlocal_label;
515 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
516 p->contains_functions = current_function_contains_functions;
517 p->is_thunk = current_function_is_thunk;
518 p->args_size = current_function_args_size;
519 p->pretend_args_size = current_function_pretend_args_size;
520 p->arg_offset_rtx = current_function_arg_offset_rtx;
521 p->varargs = current_function_varargs;
522 p->stdarg = current_function_stdarg;
523 p->uses_const_pool = current_function_uses_const_pool;
524 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
525 p->internal_arg_pointer = current_function_internal_arg_pointer;
526 p->cannot_inline = current_function_cannot_inline;
527 p->max_parm_reg = max_parm_reg;
528 p->parm_reg_stack_loc = parm_reg_stack_loc;
529 p->outgoing_args_size = current_function_outgoing_args_size;
530 p->return_rtx = current_function_return_rtx;
531 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
532 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
533 p->nonlocal_labels = nonlocal_labels;
534 p->cleanup_label = cleanup_label;
535 p->return_label = return_label;
536 p->save_expr_regs = save_expr_regs;
537 p->stack_slot_list = stack_slot_list;
538 p->parm_birth_insn = parm_birth_insn;
539 p->frame_offset = frame_offset;
540 p->tail_recursion_label = tail_recursion_label;
541 p->tail_recursion_reentry = tail_recursion_reentry;
542 p->arg_pointer_save_area = arg_pointer_save_area;
543 p->rtl_expr_chain = rtl_expr_chain;
544 p->last_parm_insn = last_parm_insn;
545 p->context_display = context_display;
546 p->trampoline_list = trampoline_list;
547 p->function_call_count = function_call_count;
548 p->temp_slots = temp_slots;
549 p->temp_slot_level = temp_slot_level;
550 p->target_temp_slot_level = target_temp_slot_level;
551 p->var_temp_slot_level = var_temp_slot_level;
552 p->fixup_var_refs_queue = 0;
553 p->epilogue_delay_list = current_function_epilogue_delay_list;
554 p->args_info = current_function_args_info;
555 p->check_memory_usage = current_function_check_memory_usage;
556 p->instrument_entry_exit = current_function_instrument_entry_exit;
557
558 save_tree_status (p, context);
559 save_storage_status (p);
560 save_emit_status (p);
561 save_expr_status (p);
562 save_stmt_status (p);
563 save_varasm_status (p, context);
564 if (save_machine_status)
565 (*save_machine_status) (p);
566 }
567
568 void
569 push_function_context ()
570 {
571 push_function_context_to (current_function_decl);
572 }
573
574 /* Restore the last saved context, at the end of a nested function.
575 This function is called from language-specific code. */
576
577 void
578 pop_function_context_from (context)
579 tree context;
580 {
581 struct function *p = outer_function_chain;
582 struct var_refs_queue *queue;
583
584 outer_function_chain = p->next;
585
586 current_function_contains_functions
587 = p->contains_functions || p->inline_obstacks
588 || context == current_function_decl;
589 current_function_name = p->name;
590 current_function_decl = p->decl;
591 current_function_pops_args = p->pops_args;
592 current_function_returns_struct = p->returns_struct;
593 current_function_returns_pcc_struct = p->returns_pcc_struct;
594 current_function_returns_pointer = p->returns_pointer;
595 current_function_needs_context = p->needs_context;
596 current_function_calls_setjmp = p->calls_setjmp;
597 current_function_calls_longjmp = p->calls_longjmp;
598 current_function_calls_alloca = p->calls_alloca;
599 current_function_has_nonlocal_label = p->has_nonlocal_label;
600 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
601 current_function_is_thunk = p->is_thunk;
602 current_function_args_size = p->args_size;
603 current_function_pretend_args_size = p->pretend_args_size;
604 current_function_arg_offset_rtx = p->arg_offset_rtx;
605 current_function_varargs = p->varargs;
606 current_function_stdarg = p->stdarg;
607 current_function_uses_const_pool = p->uses_const_pool;
608 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
609 current_function_internal_arg_pointer = p->internal_arg_pointer;
610 current_function_cannot_inline = p->cannot_inline;
611 max_parm_reg = p->max_parm_reg;
612 parm_reg_stack_loc = p->parm_reg_stack_loc;
613 current_function_outgoing_args_size = p->outgoing_args_size;
614 current_function_return_rtx = p->return_rtx;
615 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
616 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
617 nonlocal_labels = p->nonlocal_labels;
618 cleanup_label = p->cleanup_label;
619 return_label = p->return_label;
620 save_expr_regs = p->save_expr_regs;
621 stack_slot_list = p->stack_slot_list;
622 parm_birth_insn = p->parm_birth_insn;
623 frame_offset = p->frame_offset;
624 tail_recursion_label = p->tail_recursion_label;
625 tail_recursion_reentry = p->tail_recursion_reentry;
626 arg_pointer_save_area = p->arg_pointer_save_area;
627 rtl_expr_chain = p->rtl_expr_chain;
628 last_parm_insn = p->last_parm_insn;
629 context_display = p->context_display;
630 trampoline_list = p->trampoline_list;
631 function_call_count = p->function_call_count;
632 temp_slots = p->temp_slots;
633 temp_slot_level = p->temp_slot_level;
634 target_temp_slot_level = p->target_temp_slot_level;
635 var_temp_slot_level = p->var_temp_slot_level;
636 current_function_epilogue_delay_list = p->epilogue_delay_list;
637 reg_renumber = 0;
638 current_function_args_info = p->args_info;
639 current_function_check_memory_usage = p->check_memory_usage;
640 current_function_instrument_entry_exit = p->instrument_entry_exit;
641
642 restore_tree_status (p, context);
643 restore_storage_status (p);
644 restore_expr_status (p);
645 restore_emit_status (p);
646 restore_stmt_status (p);
647 restore_varasm_status (p);
648
649 if (restore_machine_status)
650 (*restore_machine_status) (p);
651
652 /* Finish doing put_var_into_stack for any of our variables
653 which became addressable during the nested function. */
654 for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
655 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
656
657 free (p);
658
659 /* Reset variables that have known state during rtx generation. */
660 rtx_equal_function_value_matters = 1;
661 virtuals_instantiated = 0;
662 }
663
664 void pop_function_context ()
665 {
666 pop_function_context_from (current_function_decl);
667 }
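
/* An illustrative sketch, not from this file: language front ends bracket
   the compilation of a nested function with the two routines above.  */
#if 0
  push_function_context ();	/* save the outer function's state */
  /* ... generate rtl for the nested FUNCTION_DECL ... */
  pop_function_context ();	/* restore the outer function's state */
#endif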
668 \f
669 /* Allocate fixed slots in the stack frame of the current function. */
670
671 /* Return size needed for stack frame based on slots so far allocated.
672 This size counts from zero. It is not rounded to STACK_BOUNDARY;
673 the caller may have to do that. */
674
675 HOST_WIDE_INT
676 get_frame_size ()
677 {
678 #ifdef FRAME_GROWS_DOWNWARD
679 return -frame_offset;
680 #else
681 return frame_offset;
682 #endif
683 }
684
685 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
686 with machine mode MODE.
687
688 ALIGN controls the amount of alignment for the address of the slot:
689 0 means according to MODE,
690 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
691 positive specifies alignment boundary in bits.
692
693 We do not round to stack_boundary here. */
694
695 rtx
696 assign_stack_local (mode, size, align)
697 enum machine_mode mode;
698 HOST_WIDE_INT size;
699 int align;
700 {
701 register rtx x, addr;
702 int bigend_correction = 0;
703 int alignment;
704
705 if (align == 0)
706 {
707 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
708 if (mode == BLKmode)
709 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
710 }
711 else if (align == -1)
712 {
713 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
714 size = CEIL_ROUND (size, alignment);
715 }
716 else
717 alignment = align / BITS_PER_UNIT;
718
719 /* Round frame offset to that alignment.
720 We must be careful here, since FRAME_OFFSET might be negative and
721 division with a negative dividend isn't as well defined as we might
722 like. So we instead assume that ALIGNMENT is a power of two and
723 use logical operations which are unambiguous. */
724 #ifdef FRAME_GROWS_DOWNWARD
725 frame_offset = FLOOR_ROUND (frame_offset, alignment);
726 #else
727 frame_offset = CEIL_ROUND (frame_offset, alignment);
728 #endif
729
730 /* On a big-endian machine, if we are allocating more space than we will use,
731 use the least significant bytes of those that are allocated. */
732 if (BYTES_BIG_ENDIAN && mode != BLKmode)
733 bigend_correction = size - GET_MODE_SIZE (mode);
734
735 #ifdef FRAME_GROWS_DOWNWARD
736 frame_offset -= size;
737 #endif
738
739 /* If we have already instantiated virtual registers, return the actual
740 address relative to the frame pointer. */
741 if (virtuals_instantiated)
742 addr = plus_constant (frame_pointer_rtx,
743 (frame_offset + bigend_correction
744 + STARTING_FRAME_OFFSET));
745 else
746 addr = plus_constant (virtual_stack_vars_rtx,
747 frame_offset + bigend_correction);
748
749 #ifndef FRAME_GROWS_DOWNWARD
750 frame_offset += size;
751 #endif
752
753 x = gen_rtx_MEM (mode, addr);
754
755 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
756
757 return x;
758 }
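
/* Illustrative calls, not from this file, showing the three ALIGN
   conventions documented above.  */
#if 0
  x = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
					/* align according to SImode */
  x = assign_stack_local (BLKmode, size, -1);
					/* BIGGEST_ALIGNMENT; size rounded */
  x = assign_stack_local (BLKmode, size, 64);
					/* align to a 64-bit boundary */
#endif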
759
760 /* Assign a stack slot in a containing function.
761 First three arguments are same as in preceding function.
762 The last argument specifies the function to allocate in. */
763
764 static rtx
765 assign_outer_stack_local (mode, size, align, function)
766 enum machine_mode mode;
767 HOST_WIDE_INT size;
768 int align;
769 struct function *function;
770 {
771 register rtx x, addr;
772 int bigend_correction = 0;
773 int alignment;
774
775 /* Allocate in the memory associated with the function in whose frame
776 we are assigning. */
777 push_obstacks (function->function_obstack,
778 function->function_maybepermanent_obstack);
779
780 if (align == 0)
781 {
782 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
783 if (mode == BLKmode)
784 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
785 }
786 else if (align == -1)
787 {
788 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
789 size = CEIL_ROUND (size, alignment);
790 }
791 else
792 alignment = align / BITS_PER_UNIT;
793
794 /* Round frame offset to that alignment. */
795 #ifdef FRAME_GROWS_DOWNWARD
796 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
797 #else
798 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
799 #endif
800
801 /* On a big-endian machine, if we are allocating more space than we will use,
802 use the least significant bytes of those that are allocated. */
803 if (BYTES_BIG_ENDIAN && mode != BLKmode)
804 bigend_correction = size - GET_MODE_SIZE (mode);
805
806 #ifdef FRAME_GROWS_DOWNWARD
807 function->frame_offset -= size;
808 #endif
809 addr = plus_constant (virtual_stack_vars_rtx,
810 function->frame_offset + bigend_correction);
811 #ifndef FRAME_GROWS_DOWNWARD
812 function->frame_offset += size;
813 #endif
814
815 x = gen_rtx_MEM (mode, addr);
816
817 function->stack_slot_list
818 = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
819
820 pop_obstacks ();
821
822 return x;
823 }
824 \f
825 /* Allocate a temporary stack slot and record it for possible later
826 reuse.
827
828 MODE is the machine mode to be given to the returned rtx.
829
830 SIZE is the size in units of the space required. We do no rounding here
831 since assign_stack_local will do any required rounding.
832
833 KEEP is 1 if this slot is to be retained after a call to
834 free_temp_slots. Automatic variables for a block are allocated
835 with this flag. KEEP is 2 if we allocate a longer term temporary,
836 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
837 if we are to allocate something at an inner level to be treated as
838 a variable in the block (e.g., a SAVE_EXPR). */
839
840 rtx
841 assign_stack_temp (mode, size, keep)
842 enum machine_mode mode;
843 HOST_WIDE_INT size;
844 int keep;
845 {
846 struct temp_slot *p, *best_p = 0;
847
848 /* If SIZE is -1 it means that somebody tried to allocate a temporary
849 of a variable size. */
850 if (size == -1)
851 abort ();
852
853 /* First try to find an available, already-allocated temporary that is the
854 exact size we require. */
855 for (p = temp_slots; p; p = p->next)
856 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
857 break;
858
859 /* If we didn't find one, try one that is larger than what we want. We
860 find the smallest such. */
861 if (p == 0)
862 for (p = temp_slots; p; p = p->next)
863 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
864 && (best_p == 0 || best_p->size > p->size))
865 best_p = p;
866
867 /* Make our best, if any, the one to use. */
868 if (best_p)
869 {
870 /* If there are enough aligned bytes left over, make them into a new
871 temp_slot so that the extra bytes don't get wasted. Do this only
872 for BLKmode slots, so that we can be sure of the alignment. */
873 if (GET_MODE (best_p->slot) == BLKmode)
874 {
875 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
876 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
877
878 if (best_p->size - rounded_size >= alignment)
879 {
880 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
881 p->in_use = p->addr_taken = 0;
882 p->size = best_p->size - rounded_size;
883 p->base_offset = best_p->base_offset + rounded_size;
884 p->full_size = best_p->full_size - rounded_size;
885 p->slot = gen_rtx_MEM (BLKmode,
886 plus_constant (XEXP (best_p->slot, 0),
887 rounded_size));
888 p->address = 0;
889 p->rtl_expr = 0;
890 p->next = temp_slots;
891 temp_slots = p;
892
893 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
894 stack_slot_list);
895
896 best_p->size = rounded_size;
897 best_p->full_size = rounded_size;
898 }
899 }
900
901 p = best_p;
902 }
903
904 /* If we still didn't find one, make a new temporary. */
905 if (p == 0)
906 {
907 HOST_WIDE_INT frame_offset_old = frame_offset;
908
909 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
910
911 /* If the temp slot mode doesn't indicate the alignment,
912 use the largest possible, so no one will be disappointed. */
913 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
914
915 /* The following slot size computation is necessary because we don't
916 know the actual size of the temporary slot until assign_stack_local
917 has performed all the frame alignment and size rounding for the
918 requested temporary. Note that extra space added for alignment
919 can be either above or below this stack slot depending on which
920 way the frame grows. We include the extra space if and only if it
921 is above this slot. */
922 #ifdef FRAME_GROWS_DOWNWARD
923 p->size = frame_offset_old - frame_offset;
924 #else
925 p->size = size;
926 #endif
927
928 /* Now define the fields used by combine_temp_slots. */
929 #ifdef FRAME_GROWS_DOWNWARD
930 p->base_offset = frame_offset;
931 p->full_size = frame_offset_old - frame_offset;
932 #else
933 p->base_offset = frame_offset_old;
934 p->full_size = frame_offset - frame_offset_old;
935 #endif
936 p->address = 0;
937 p->next = temp_slots;
938 temp_slots = p;
939 }
940
941 p->in_use = 1;
942 p->addr_taken = 0;
943 p->rtl_expr = sequence_rtl_expr;
944
945 if (keep == 2)
946 {
947 p->level = target_temp_slot_level;
948 p->keep = 0;
949 }
950 else if (keep == 3)
951 {
952 p->level = var_temp_slot_level;
953 p->keep = 0;
954 }
955 else
956 {
957 p->level = temp_slot_level;
958 p->keep = keep;
959 }
960
961 /* We may be reusing an old slot, so clear any MEM flags that may have been
962 set from before. */
963 RTX_UNCHANGING_P (p->slot) = 0;
964 MEM_IN_STRUCT_P (p->slot) = 0;
965 return p->slot;
966 }
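
/* Illustrative calls, not from this file, showing the KEEP conventions
   documented above.  */
#if 0
  t = assign_stack_temp (mode, size, 0);	/* freed by free_temp_slots */
  t = assign_stack_temp (mode, size, 1);	/* kept: a block's variable */
  t = assign_stack_temp (mode, size, 2);	/* at target_temp_slot_level */
  t = assign_stack_temp (mode, size, 3);	/* at var_temp_slot_level */
#endif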
967 \f
968 /* Assign a temporary of given TYPE.
969 KEEP is as for assign_stack_temp.
970 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
971 it is 0 if a register is OK.
972 DONT_PROMOTE is 1 if we should not promote values in register
973 to wider modes. */
974
975 rtx
976 assign_temp (type, keep, memory_required, dont_promote)
977 tree type;
978 int keep;
979 int memory_required;
980 int dont_promote;
981 {
982 enum machine_mode mode = TYPE_MODE (type);
983 int unsignedp = TREE_UNSIGNED (type);
984
985 if (mode == BLKmode || memory_required)
986 {
987 HOST_WIDE_INT size = int_size_in_bytes (type);
988 rtx tmp;
989
990 /* Unfortunately, we don't yet know how to allocate variable-sized
991 temporaries. However, sometimes we have a fixed upper limit on
992 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
993 instead. This is the case for Chill variable-sized strings. */
994 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
995 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
996 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
997 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
998
999 tmp = assign_stack_temp (mode, size, keep);
1000 MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
1001 return tmp;
1002 }
1003
1004 #ifndef PROMOTE_FOR_CALL_ONLY
1005 if (! dont_promote)
1006 mode = promote_mode (type, mode, &unsignedp, 0);
1007 #endif
1008
1009 return gen_reg_rtx (mode);
1010 }
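
/* An illustrative call, not from this file: a temporary to hold a
   struct-valued expression must be addressable memory, so
   MEMORY_REQUIRED is 1 here.  EXP is a hypothetical tree node.  */
#if 0
  temp = assign_temp (TREE_TYPE (exp), 0, 1, 0);
#endif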
1011 \f
1012 /* Combine temporary stack slots which are adjacent on the stack.
1013
1014 This allows for better use of already allocated stack space. This is only
1015 done for BLKmode slots because we can be sure that we won't have alignment
1016 problems in this case. */
1017
1018 void
1019 combine_temp_slots ()
1020 {
1021 struct temp_slot *p, *q;
1022 struct temp_slot *prev_p, *prev_q;
1023 int num_slots;
1024
1025 /* If there are a lot of temp slots, don't do anything unless
1026 high levels of optimization are enabled. */
1027 if (! flag_expensive_optimizations)
1028 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1029 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1030 return;
1031
1032 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
1033 {
1034 int delete_p = 0;
1035
1036 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
1037 for (q = p->next, prev_q = p; q; q = prev_q->next)
1038 {
1039 int delete_q = 0;
1040 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
1041 {
1042 if (p->base_offset + p->full_size == q->base_offset)
1043 {
1044 /* Q comes after P; combine Q into P. */
1045 p->size += q->size;
1046 p->full_size += q->full_size;
1047 delete_q = 1;
1048 }
1049 else if (q->base_offset + q->full_size == p->base_offset)
1050 {
1051 /* P comes after Q; combine P into Q. */
1052 q->size += p->size;
1053 q->full_size += p->full_size;
1054 delete_p = 1;
1055 break;
1056 }
1057 }
1058 /* Either delete Q or advance past it. */
1059 if (delete_q)
1060 prev_q->next = q->next;
1061 else
1062 prev_q = q;
1063 }
1064 /* Either delete P or advance past it. */
1065 if (delete_p)
1066 {
1067 if (prev_p)
1068 prev_p->next = p->next;
1069 else
1070 temp_slots = p->next;
1071 }
1072 else
1073 prev_p = p;
1074 }
1075 }
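
/* For instance (an illustration, not from this file): two free BLKmode
   slots with (base_offset, full_size) of (0, 16) and (16, 8) are adjacent,
   so the first absorbs the second, leaving one free 24-byte slot.  */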
1076 \f
1077 /* Find the temp slot corresponding to the object at address X. */
1078
1079 static struct temp_slot *
1080 find_temp_slot_from_address (x)
1081 rtx x;
1082 {
1083 struct temp_slot *p;
1084 rtx next;
1085
1086 for (p = temp_slots; p; p = p->next)
1087 {
1088 if (! p->in_use)
1089 continue;
1090
1091 else if (XEXP (p->slot, 0) == x
1092 || p->address == x
1093 || (GET_CODE (x) == PLUS
1094 && XEXP (x, 0) == virtual_stack_vars_rtx
1095 && GET_CODE (XEXP (x, 1)) == CONST_INT
1096 && INTVAL (XEXP (x, 1)) >= p->base_offset
1097 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1098 return p;
1099
1100 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1101 for (next = p->address; next; next = XEXP (next, 1))
1102 if (XEXP (next, 0) == x)
1103 return p;
1104 }
1105
1106 return 0;
1107 }
1108
1109 /* Indicate that NEW is an alternate way of referring to the temp slot
1110 that previously was known by OLD. */
1111
1112 void
1113 update_temp_slot_address (old, new)
1114 rtx old, new;
1115 {
1116 struct temp_slot *p = find_temp_slot_from_address (old);
1117
1118 /* If none, return. Else add NEW as an alias. */
1119 if (p == 0)
1120 return;
1121 else if (p->address == 0)
1122 p->address = new;
1123 else
1124 {
1125 if (GET_CODE (p->address) != EXPR_LIST)
1126 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1127
1128 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1129 }
1130 }
1131
1132 /* If X could be a reference to a temporary slot, mark the fact that its
1133 address was taken. */
1134
1135 void
1136 mark_temp_addr_taken (x)
1137 rtx x;
1138 {
1139 struct temp_slot *p;
1140
1141 if (x == 0)
1142 return;
1143
1144 /* If X is not in memory or is at a constant address, it cannot be in
1145 a temporary slot. */
1146 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1147 return;
1148
1149 p = find_temp_slot_from_address (XEXP (x, 0));
1150 if (p != 0)
1151 p->addr_taken = 1;
1152 }
1153
1154 /* If X could be a reference to a temporary slot, mark that slot as
1155 belonging to the level one higher than the current level. If X
1156 matched one of our slots, just mark that one. Otherwise, we can't
1157 easily predict which it is, so upgrade all of them. Kept slots
1158 need not be touched.
1159
1160 This is called when an ({...}) construct occurs and a statement
1161 returns a value in memory. */
1162
1163 void
1164 preserve_temp_slots (x)
1165 rtx x;
1166 {
1167 struct temp_slot *p = 0;
1168
1169 /* If there is no result, we still might have some objects whose addresses
1170 were taken, so we need to make sure they stay around. */
1171 if (x == 0)
1172 {
1173 for (p = temp_slots; p; p = p->next)
1174 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1175 p->level--;
1176
1177 return;
1178 }
1179
1180 /* If X is a register that is being used as a pointer, see if we have
1181 a temporary slot we know it points to. To be consistent with
1182 the code below, we really should preserve all non-kept slots
1183 if we can't find a match, but that seems to be much too costly. */
1184 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1185 p = find_temp_slot_from_address (x);
1186
1187 /* If X is not in memory or is at a constant address, it cannot be in
1188 a temporary slot, but it can contain something whose address was
1189 taken. */
1190 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1191 {
1192 for (p = temp_slots; p; p = p->next)
1193 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1194 p->level--;
1195
1196 return;
1197 }
1198
1199 /* First see if we can find a match. */
1200 if (p == 0)
1201 p = find_temp_slot_from_address (XEXP (x, 0));
1202
1203 if (p != 0)
1204 {
1205 /* Move everything at our level whose address was taken to our new
1206 level in case we used its address. */
1207 struct temp_slot *q;
1208
1209 if (p->level == temp_slot_level)
1210 {
1211 for (q = temp_slots; q; q = q->next)
1212 if (q != p && q->addr_taken && q->level == p->level)
1213 q->level--;
1214
1215 p->level--;
1216 p->addr_taken = 0;
1217 }
1218 return;
1219 }
1220
1221 /* Otherwise, preserve all non-kept slots at this level. */
1222 for (p = temp_slots; p; p = p->next)
1223 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1224 p->level--;
1225 }
1226
1227 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1228 with that RTL_EXPR, promote it into a temporary slot at the present
1229 level so it will not be freed when we free slots made in the
1230 RTL_EXPR. */
1231
1232 void
1233 preserve_rtl_expr_result (x)
1234 rtx x;
1235 {
1236 struct temp_slot *p;
1237
1238 /* If X is not in memory or is at a constant address, it cannot be in
1239 a temporary slot. */
1240 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1241 return;
1242
1243 /* If we can find a match, move it to our level unless it is already at
1244 an upper level. */
1245 p = find_temp_slot_from_address (XEXP (x, 0));
1246 if (p != 0)
1247 {
1248 p->level = MIN (p->level, temp_slot_level);
1249 p->rtl_expr = 0;
1250 }
1251
1252 return;
1253 }
1254
1255 /* Free all temporaries used so far. This is normally called at the end
1256 of generating code for a statement. Don't free any temporaries
1257 currently in use for an RTL_EXPR that hasn't yet been emitted.
1258 We could eventually do better than this, since such a slot can be reused while
1259 generating the same RTL_EXPR, but this is complex and probably not
1260 worthwhile. */
1261
1262 void
1263 free_temp_slots ()
1264 {
1265 struct temp_slot *p;
1266
1267 for (p = temp_slots; p; p = p->next)
1268 if (p->in_use && p->level == temp_slot_level && ! p->keep
1269 && p->rtl_expr == 0)
1270 p->in_use = 0;
1271
1272 combine_temp_slots ();
1273 }
1274
1275 /* Free all temporary slots used in T, an RTL_EXPR node. */
1276
1277 void
1278 free_temps_for_rtl_expr (t)
1279 tree t;
1280 {
1281 struct temp_slot *p;
1282
1283 for (p = temp_slots; p; p = p->next)
1284 if (p->rtl_expr == t)
1285 p->in_use = 0;
1286
1287 combine_temp_slots ();
1288 }
1289
1290 /* Mark all temporaries ever allocated in this function as not suitable
1291 for reuse until the current level is exited. */
1292
1293 void
1294 mark_all_temps_used ()
1295 {
1296 struct temp_slot *p;
1297
1298 for (p = temp_slots; p; p = p->next)
1299 {
1300 p->in_use = p->keep = 1;
1301 p->level = MIN (p->level, temp_slot_level);
1302 }
1303 }
1304
1305 /* Push deeper into the nesting level for stack temporaries. */
1306
1307 void
1308 push_temp_slots ()
1309 {
1310 temp_slot_level++;
1311 }
1312
1313 /* Likewise, but save the new level as the place to allocate variables
1314 for blocks. */
1315
1316 void
1317 push_temp_slots_for_block ()
1318 {
1319 push_temp_slots ();
1320
1321 var_temp_slot_level = temp_slot_level;
1322 }
1323
1324 /* Likewise, but save the new level as the place to allocate temporaries
1325 for TARGET_EXPRs. */
1326
1327 void
1328 push_temp_slots_for_target ()
1329 {
1330 push_temp_slots ();
1331
1332 target_temp_slot_level = temp_slot_level;
1333 }
1334
1335 /* Set and get the value of target_temp_slot_level. The only
1336 permitted use of these functions is to save and restore this value. */
1337
1338 int
1339 get_target_temp_slot_level ()
1340 {
1341 return target_temp_slot_level;
1342 }
1343
1344 void
1345 set_target_temp_slot_level (level)
1346 int level;
1347 {
1348 target_temp_slot_level = level;
1349 }
1350
1351 /* Pop a temporary nesting level. All slots in use in the current level
1352 are freed. */
1353
1354 void
1355 pop_temp_slots ()
1356 {
1357 struct temp_slot *p;
1358
1359 for (p = temp_slots; p; p = p->next)
1360 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1361 p->in_use = 0;
1362
1363 combine_temp_slots ();
1364
1365 temp_slot_level--;
1366 }
1367
1368 /* Initialize temporary slots. */
1369
1370 void
1371 init_temp_slots ()
1372 {
1373 /* We have not allocated any temporaries yet. */
1374 temp_slots = 0;
1375 temp_slot_level = 0;
1376 var_temp_slot_level = 0;
1377 target_temp_slot_level = 0;
1378 }
1379 \f
1380 /* Retroactively move an auto variable from a register to a stack slot.
1381 This is done when an address-reference to the variable is seen. */
1382
1383 void
1384 put_var_into_stack (decl)
1385 tree decl;
1386 {
1387 register rtx reg;
1388 enum machine_mode promoted_mode, decl_mode;
1389 struct function *function = 0;
1390 tree context;
1391 int can_use_addressof;
1392
1393 context = decl_function_context (decl);
1394
1395 /* Get the current rtl used for this object and its original mode. */
1396 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1397
1398 /* No need to do anything if decl has no rtx yet
1399 since in that case caller is setting TREE_ADDRESSABLE
1400 and a stack slot will be assigned when the rtl is made. */
1401 if (reg == 0)
1402 return;
1403
1404 /* Get the declared mode for this object. */
1405 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1406 : DECL_MODE (decl));
1407 /* Get the mode it's actually stored in. */
1408 promoted_mode = GET_MODE (reg);
1409
1410 /* If this variable comes from an outer function,
1411 find that function's saved context. */
1412 if (context != current_function_decl && context != inline_function_decl)
1413 for (function = outer_function_chain; function; function = function->next)
1414 if (function->decl == context)
1415 break;
1416
1417 /* If this is a variable-size object with a pseudo to address it,
1418 put that pseudo into the stack, if the var is nonlocal. */
1419 if (DECL_NONLOCAL (decl)
1420 && GET_CODE (reg) == MEM
1421 && GET_CODE (XEXP (reg, 0)) == REG
1422 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1423 {
1424 reg = XEXP (reg, 0);
1425 decl_mode = promoted_mode = GET_MODE (reg);
1426 }
1427
1428 can_use_addressof
1429 = (function == 0
1430 && optimize > 0
1431 /* FIXME make it work for promoted modes too */
1432 && decl_mode == promoted_mode
1433 #ifdef NON_SAVING_SETJMP
1434 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1435 #endif
1436 );
1437
1438 /* If we can't use ADDRESSOF, make sure we see through one we already
1439 generated. */
1440 if (! can_use_addressof && GET_CODE (reg) == MEM
1441 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1442 reg = XEXP (XEXP (reg, 0), 0);
1443
1444 /* Now we should have a value that resides in one or more pseudo regs. */
1445
1446 if (GET_CODE (reg) == REG)
1447 {
1448 /* If this variable lives in the current function and we don't need
1449 to put things in the stack for the sake of setjmp, try to keep it
1450 in a register until we know we actually need the address. */
1451 if (can_use_addressof)
1452 gen_mem_addressof (reg, decl);
1453 else
1454 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1455 promoted_mode, decl_mode,
1456 TREE_SIDE_EFFECTS (decl), 0,
1457 TREE_USED (decl)
1458 || DECL_INITIAL (decl) != 0);
1459 }
1460 else if (GET_CODE (reg) == CONCAT)
1461 {
1462 /* A CONCAT contains two pseudos; put them both in the stack.
1463 We do it so they end up consecutive. */
1464 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1465 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1466 #ifdef FRAME_GROWS_DOWNWARD
1467 /* Since part 0 should have a lower address, do it second. */
1468 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1469 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1470 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1471 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1472 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1473 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1474 #else
1475 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1476 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1477 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1478 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1479 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1480 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1481 #endif
1482
1483 /* Change the CONCAT into a combined MEM for both parts. */
1484 PUT_CODE (reg, MEM);
1485 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1486 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1487
1488 /* The two parts are in memory order already.
1489 Use the lower part's address as ours. */
1490 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1491 /* Prevent sharing of rtl that might lose. */
1492 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1493 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1494 }
1495 else
1496 return;
1497
1498 if (current_function_check_memory_usage)
1499 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1500 XEXP (reg, 0), ptr_mode,
1501 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1502 TYPE_MODE (sizetype),
1503 GEN_INT (MEMORY_USE_RW),
1504 TYPE_MODE (integer_type_node));
1505 }
1506
1507 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1508 into the stack frame of FUNCTION (0 means the current function).
1509 DECL_MODE is the machine mode of the user-level data type.
1510 PROMOTED_MODE is the machine mode of the register.
1511 VOLATILE_P is nonzero if this is for a "volatile" decl.
1512 USED_P is nonzero if this reg might have already been used in an insn. */
1513
1514 static void
1515 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1516 original_regno, used_p)
1517 struct function *function;
1518 rtx reg;
1519 tree type;
1520 enum machine_mode promoted_mode, decl_mode;
1521 int volatile_p;
1522 int original_regno;
1523 int used_p;
1524 {
1525 rtx new = 0;
1526 int regno = original_regno;
1527
1528 if (regno == 0)
1529 regno = REGNO (reg);
1530
1531 if (function)
1532 {
1533 if (regno < function->max_parm_reg)
1534 new = function->parm_reg_stack_loc[regno];
1535 if (new == 0)
1536 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1537 0, function);
1538 }
1539 else
1540 {
1541 if (regno < max_parm_reg)
1542 new = parm_reg_stack_loc[regno];
1543 if (new == 0)
1544 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1545 }
1546
1547 PUT_MODE (reg, decl_mode);
1548 XEXP (reg, 0) = XEXP (new, 0);
1549 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1550 MEM_VOLATILE_P (reg) = volatile_p;
1551 PUT_CODE (reg, MEM);
1552
1553 /* If this is a memory ref that contains aggregate components,
1554 mark it as such for cse and loop optimize. If we are reusing a
1555 previously generated stack slot, then we need to copy the bit in
1556 case it was set for other reasons. For instance, it is set for
1557 __builtin_va_alist. */
1558 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
1559 MEM_ALIAS_SET (reg) = get_alias_set (type);
1560
1561 /* Now make sure that all refs to the variable, previously made
1562 when it was a register, are fixed up to be valid again. */
1563
1564 if (used_p && function != 0)
1565 {
1566 struct var_refs_queue *temp;
1567
1568 /* Variable is inherited; fix it up when we get back to its function. */
1569 push_obstacks (function->function_obstack,
1570 function->function_maybepermanent_obstack);
1571
1572 /* See comment in restore_tree_status in tree.c for why this needs to be
1573 on saveable obstack. */
1574 temp
1575 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1576 temp->modified = reg;
1577 temp->promoted_mode = promoted_mode;
1578 temp->unsignedp = TREE_UNSIGNED (type);
1579 temp->next = function->fixup_var_refs_queue;
1580 function->fixup_var_refs_queue = temp;
1581 pop_obstacks ();
1582 }
1583 else if (used_p)
1584 /* Variable is local; fix it up now. */
1585 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1586 }
1587 \f
1588 static void
1589 fixup_var_refs (var, promoted_mode, unsignedp)
1590 rtx var;
1591 enum machine_mode promoted_mode;
1592 int unsignedp;
1593 {
1594 tree pending;
1595 rtx first_insn = get_insns ();
1596 struct sequence_stack *stack = sequence_stack;
1597 tree rtl_exps = rtl_expr_chain;
1598
1599 /* Must scan all insns for stack-refs that exceed the limit. */
1600 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1601
1602 /* Scan all pending sequences too. */
1603 for (; stack; stack = stack->next)
1604 {
1605 push_to_sequence (stack->first);
1606 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1607 stack->first, stack->next != 0);
1608 /* Update remembered end of sequence
1609 in case we added an insn at the end. */
1610 stack->last = get_last_insn ();
1611 end_sequence ();
1612 }
1613
1614 /* Scan all waiting RTL_EXPRs too. */
1615 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1616 {
1617 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1618 if (seq != const0_rtx && seq != 0)
1619 {
1620 push_to_sequence (seq);
1621 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1622 end_sequence ();
1623 }
1624 }
1625 }
1626 \f
1627 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries, and X is
1628 some part of an insn. Return a struct fixup_replacement whose OLD
1629 value is equal to X. Allocate a new structure if no such entry exists. */
1630
1631 static struct fixup_replacement *
1632 find_fixup_replacement (replacements, x)
1633 struct fixup_replacement **replacements;
1634 rtx x;
1635 {
1636 struct fixup_replacement *p;
1637
1638 /* See if we have already replaced this. */
1639 for (p = *replacements; p && p->old != x; p = p->next)
1640 ;
1641
1642 if (p == 0)
1643 {
1644 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1645 p->old = x;
1646 p->new = 0;
1647 p->next = *replacements;
1648 *replacements = p;
1649 }
1650
1651 return p;
1652 }
1653
1654 /* Scan the insn-chain starting with INSN for refs to VAR
1655 and fix them up. TOPLEVEL is nonzero if this chain is the
1656 main chain of insns for the current function. */
1657
1658 static void
1659 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1660 rtx var;
1661 enum machine_mode promoted_mode;
1662 int unsignedp;
1663 rtx insn;
1664 int toplevel;
1665 {
1666 rtx call_dest = 0;
1667
1668 while (insn)
1669 {
1670 rtx next = NEXT_INSN (insn);
1671 rtx set, prev, prev_set;
1672 rtx note;
1673
1674 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1675 {
1676 /* If this is a CLOBBER of VAR, delete it.
1677
1678 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1679 and REG_RETVAL notes too. */
1680 if (GET_CODE (PATTERN (insn)) == CLOBBER
1681 && (XEXP (PATTERN (insn), 0) == var
1682 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1683 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1684 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1685 {
1686 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1687 /* The REG_LIBCALL note will go away since we are going to
1688 turn INSN into a NOTE, so just delete the
1689 corresponding REG_RETVAL note. */
1690 remove_note (XEXP (note, 0),
1691 find_reg_note (XEXP (note, 0), REG_RETVAL,
1692 NULL_RTX));
1693
1694               /* In unoptimized compilation, we shouldn't call delete_insn
1695                  except in jump.c, when generating warnings.  */
1696 PUT_CODE (insn, NOTE);
1697 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1698 NOTE_SOURCE_FILE (insn) = 0;
1699 }
1700
1701 /* The insn to load VAR from a home in the arglist
1702 is now a no-op. When we see it, just delete it.
1703 Similarly if this is storing VAR from a register from which
1704 it was loaded in the previous insn. This will occur
1705 when an ADDRESSOF was made for an arglist slot. */
1706 else if (toplevel
1707 && (set = single_set (insn)) != 0
1708 && SET_DEST (set) == var
1709 /* If this represents the result of an insn group,
1710 don't delete the insn. */
1711 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1712 && (rtx_equal_p (SET_SRC (set), var)
1713 || (GET_CODE (SET_SRC (set)) == REG
1714 && (prev = prev_nonnote_insn (insn)) != 0
1715 && (prev_set = single_set (prev)) != 0
1716 && SET_DEST (prev_set) == SET_SRC (set)
1717 && rtx_equal_p (SET_SRC (prev_set), var))))
1718 {
1719               /* In unoptimized compilation, we shouldn't call delete_insn
1720                  except in jump.c, when generating warnings.  */
1721 PUT_CODE (insn, NOTE);
1722 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1723 NOTE_SOURCE_FILE (insn) = 0;
1724 if (insn == last_parm_insn)
1725 last_parm_insn = PREV_INSN (next);
1726 }
1727 else
1728 {
1729 struct fixup_replacement *replacements = 0;
1730 rtx next_insn = NEXT_INSN (insn);
1731
1732 if (SMALL_REGISTER_CLASSES)
1733 {
1734 /* If the insn that copies the results of a CALL_INSN
1735 into a pseudo now references VAR, we have to use an
1736 intermediate pseudo since we want the life of the
1737 return value register to be only a single insn.
1738
1739                  If we don't use an intermediate pseudo, such things as
1740                  address computations (to make the address of VAR valid
1741                  if it is not) can be placed between the CALL_INSN and INSN.
1742
1743 To make sure this doesn't happen, we record the destination
1744 of the CALL_INSN and see if the next insn uses both that
1745 and VAR. */
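              /* As an illustrative sketch (register numbers made up): given

                     (call_insn ... (set (reg:SI 0) (call ...)))
                     (insn (set VAR (reg:SI 0)))

                 we emit (set (reg:SI 100) (reg:SI 0)) just before the second
                 insn and rewrite it to use (reg:SI 100), so nothing we later
                 emit to validize VAR's address can land between the call and
                 the use of the return register.  */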
1746
1747 if (call_dest != 0 && GET_CODE (insn) == INSN
1748 && reg_mentioned_p (var, PATTERN (insn))
1749 && reg_mentioned_p (call_dest, PATTERN (insn)))
1750 {
1751 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1752
1753 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1754
1755 PATTERN (insn) = replace_rtx (PATTERN (insn),
1756 call_dest, temp);
1757 }
1758
1759 if (GET_CODE (insn) == CALL_INSN
1760 && GET_CODE (PATTERN (insn)) == SET)
1761 call_dest = SET_DEST (PATTERN (insn));
1762 else if (GET_CODE (insn) == CALL_INSN
1763 && GET_CODE (PATTERN (insn)) == PARALLEL
1764 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1765 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1766 else
1767 call_dest = 0;
1768 }
1769
1770 /* See if we have to do anything to INSN now that VAR is in
1771 memory. If it needs to be loaded into a pseudo, use a single
1772 pseudo for the entire insn in case there is a MATCH_DUP
1773 between two operands. We pass a pointer to the head of
1774 a list of struct fixup_replacements. If fixup_var_refs_1
1775 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1776 it will record them in this list.
1777
1778 If it allocated a pseudo for any replacement, we copy into
1779 it here. */
1780
1781 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1782 &replacements);
1783
1784 /* If this is last_parm_insn, and any instructions were output
1785 after it to fix it up, then we must set last_parm_insn to
1786 the last such instruction emitted. */
1787 if (insn == last_parm_insn)
1788 last_parm_insn = PREV_INSN (next_insn);
1789
1790 while (replacements)
1791 {
1792 if (GET_CODE (replacements->new) == REG)
1793 {
1794 rtx insert_before;
1795 rtx seq;
1796
1797 /* OLD might be a (subreg (mem)). */
1798 if (GET_CODE (replacements->old) == SUBREG)
1799 replacements->old
1800 = fixup_memory_subreg (replacements->old, insn, 0);
1801 else
1802 replacements->old
1803 = fixup_stack_1 (replacements->old, insn);
1804
1805 insert_before = insn;
1806
1807 /* If we are changing the mode, do a conversion.
1808 This might be wasteful, but combine.c will
1809 eliminate much of the waste. */
1810
1811 if (GET_MODE (replacements->new)
1812 != GET_MODE (replacements->old))
1813 {
1814 start_sequence ();
1815 convert_move (replacements->new,
1816 replacements->old, unsignedp);
1817 seq = gen_sequence ();
1818 end_sequence ();
1819 }
1820 else
1821 seq = gen_move_insn (replacements->new,
1822 replacements->old);
1823
1824 emit_insn_before (seq, insert_before);
1825 }
1826
1827 replacements = replacements->next;
1828 }
1829 }
1830
1831 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1832 But don't touch other insns referred to by reg-notes;
1833 we will get them elsewhere. */
1834 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1835 if (GET_CODE (note) != INSN_LIST)
1836 XEXP (note, 0)
1837 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1838 }
1839 insn = next;
1840 }
1841 }
1842 \f
1843 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1844 See if the rtx expression at *LOC in INSN needs to be changed.
1845
1846 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1847 contain a list of original rtx's and replacements. If we find that we need
1848 to modify this insn by replacing a memory reference with a pseudo or by
1849 making a new MEM to implement a SUBREG, we consult that list to see if
1850 we have already chosen a replacement. If none has already been allocated,
1851 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1852 or the SUBREG, as appropriate, to the pseudo. */
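/* As a sketch (not a transcript of actual output): if VAR, formerly
   (reg:SI 42), is now (mem:SI (plus FP -8)), an operand
   (subreg:HI (reg:SI 42) 0) is first rewritten as a narrower MEM by
   fixup_memory_subreg; if the insn then fails to recognize, we fall back
   to a fresh pseudo recorded on REPLACEMENTS, and fixup_var_refs_insns
   emits the copy from the stack slot.  */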
1853
1854 static void
1855 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1856 register rtx var;
1857 enum machine_mode promoted_mode;
1858 register rtx *loc;
1859 rtx insn;
1860 struct fixup_replacement **replacements;
1861 {
1862 register int i;
1863 register rtx x = *loc;
1864 RTX_CODE code = GET_CODE (x);
1865 register char *fmt;
1866 register rtx tem, tem1;
1867 struct fixup_replacement *replacement;
1868
1869 switch (code)
1870 {
1871 case ADDRESSOF:
1872 if (XEXP (x, 0) == var)
1873 {
1874 /* Prevent sharing of rtl that might lose. */
1875 rtx sub = copy_rtx (XEXP (var, 0));
1876
1877 start_sequence ();
1878
1879 if (! validate_change (insn, loc, sub, 0))
1880 {
1881 rtx y = force_operand (sub, NULL_RTX);
1882
1883 if (! validate_change (insn, loc, y, 0))
1884 *loc = copy_to_reg (y);
1885 }
1886
1887 emit_insn_before (gen_sequence (), insn);
1888 end_sequence ();
1889 }
1890 return;
1891
1892 case MEM:
1893 if (var == x)
1894 {
1895 /* If we already have a replacement, use it. Otherwise,
1896 try to fix up this address in case it is invalid. */
1897
1898 replacement = find_fixup_replacement (replacements, var);
1899 if (replacement->new)
1900 {
1901 *loc = replacement->new;
1902 return;
1903 }
1904
1905 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1906
1907 /* Unless we are forcing memory to register or we changed the mode,
1908 we can leave things the way they are if the insn is valid. */
1909
1910 INSN_CODE (insn) = -1;
1911 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1912 && recog_memoized (insn) >= 0)
1913 return;
1914
1915 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1916 return;
1917 }
1918
1919 /* If X contains VAR, we need to unshare it here so that we update
1920 each occurrence separately. But all identical MEMs in one insn
1921 must be replaced with the same rtx because of the possibility of
1922 MATCH_DUPs. */
1923
1924 if (reg_mentioned_p (var, x))
1925 {
1926 replacement = find_fixup_replacement (replacements, x);
1927 if (replacement->new == 0)
1928 replacement->new = copy_most_rtx (x, var);
1929
1930 *loc = x = replacement->new;
1931 }
1932 break;
1933
1934 case REG:
1935 case CC0:
1936 case PC:
1937 case CONST_INT:
1938 case CONST:
1939 case SYMBOL_REF:
1940 case LABEL_REF:
1941 case CONST_DOUBLE:
1942 return;
1943
1944 case SIGN_EXTRACT:
1945 case ZERO_EXTRACT:
1946 /* Note that in some cases those types of expressions are altered
1947 by optimize_bit_field, and do not survive to get here. */
1948 if (XEXP (x, 0) == var
1949 || (GET_CODE (XEXP (x, 0)) == SUBREG
1950 && SUBREG_REG (XEXP (x, 0)) == var))
1951 {
1952 /* Get TEM as a valid MEM in the mode presently in the insn.
1953
1954 We don't worry about the possibility of MATCH_DUP here; it
1955 is highly unlikely and would be tricky to handle. */
1956
1957 tem = XEXP (x, 0);
1958 if (GET_CODE (tem) == SUBREG)
1959 {
1960 if (GET_MODE_BITSIZE (GET_MODE (tem))
1961 > GET_MODE_BITSIZE (GET_MODE (var)))
1962 {
1963 replacement = find_fixup_replacement (replacements, var);
1964 if (replacement->new == 0)
1965 replacement->new = gen_reg_rtx (GET_MODE (var));
1966 SUBREG_REG (tem) = replacement->new;
1967 }
1968 else
1969 tem = fixup_memory_subreg (tem, insn, 0);
1970 }
1971 else
1972 tem = fixup_stack_1 (tem, insn);
1973
1974 /* Unless we want to load from memory, get TEM into the proper mode
1975 for an extract from memory. This can only be done if the
1976 extract is at a constant position and length. */
1977
1978 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1979 && GET_CODE (XEXP (x, 2)) == CONST_INT
1980 && ! mode_dependent_address_p (XEXP (tem, 0))
1981 && ! MEM_VOLATILE_P (tem))
1982 {
1983 enum machine_mode wanted_mode = VOIDmode;
1984 enum machine_mode is_mode = GET_MODE (tem);
1985 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1986
1987 #ifdef HAVE_extzv
1988 if (GET_CODE (x) == ZERO_EXTRACT)
1989 {
1990 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1991 if (wanted_mode == VOIDmode)
1992 wanted_mode = word_mode;
1993 }
1994 #endif
1995 #ifdef HAVE_extv
1996 if (GET_CODE (x) == SIGN_EXTRACT)
1997 {
1998 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1999 if (wanted_mode == VOIDmode)
2000 wanted_mode = word_mode;
2001 }
2002 #endif
2003 /* If we have a narrower mode, we can do something. */
2004 if (wanted_mode != VOIDmode
2005 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2006 {
2007 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2008 rtx old_pos = XEXP (x, 2);
2009 rtx newmem;
2010
2011 /* If the bytes and bits are counted differently, we
2012 must adjust the offset. */
2013 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2014 offset = (GET_MODE_SIZE (is_mode)
2015 - GET_MODE_SIZE (wanted_mode) - offset);
2016
2017 pos %= GET_MODE_BITSIZE (wanted_mode);
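                      /* Worked example (illustrative numbers): extracting at
                         bit position 37 from a DImode MEM with a QImode
                         WANTED_MODE gives offset = 37 / 8 = 4 bytes and
                         pos = 37 % 8 = 5 within the new QImode reference,
                         when bits and bytes are counted the same way.  */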
2018
2019 newmem = gen_rtx_MEM (wanted_mode,
2020 plus_constant (XEXP (tem, 0), offset));
2021 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2022 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2023 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2024
2025 /* Make the change and see if the insn remains valid. */
2026 INSN_CODE (insn) = -1;
2027 XEXP (x, 0) = newmem;
2028 XEXP (x, 2) = GEN_INT (pos);
2029
2030 if (recog_memoized (insn) >= 0)
2031 return;
2032
2033 /* Otherwise, restore old position. XEXP (x, 0) will be
2034 restored later. */
2035 XEXP (x, 2) = old_pos;
2036 }
2037 }
2038
2039 /* If we get here, the bitfield extract insn can't accept a memory
2040 reference. Copy the input into a register. */
2041
2042 tem1 = gen_reg_rtx (GET_MODE (tem));
2043 emit_insn_before (gen_move_insn (tem1, tem), insn);
2044 XEXP (x, 0) = tem1;
2045 return;
2046 }
2047 break;
2048
2049 case SUBREG:
2050 if (SUBREG_REG (x) == var)
2051 {
2052           /* If this is a special SUBREG made because VAR was promoted
2053              to a wider mode, replace it with VAR and call ourself
2054 recursively, this time saying that the object previously
2055 had its current mode (by virtue of the SUBREG). */
2056
2057 if (SUBREG_PROMOTED_VAR_P (x))
2058 {
2059 *loc = var;
2060 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2061 return;
2062 }
2063
2064 /* If this SUBREG makes VAR wider, it has become a paradoxical
2065 SUBREG with VAR in memory, but these aren't allowed at this
2066 stage of the compilation. So load VAR into a pseudo and take
2067 a SUBREG of that pseudo. */
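          /* E.g. (a hypothetical sketch): if VAR is now (mem:HI (plus FP -2))
             and appears as (subreg:SI VAR 0), we substitute
             (subreg:SI (reg:HI 100) 0) for a fresh pseudo 100; the copy from
             VAR into that pseudo is emitted by our caller from the
             REPLACEMENTS list.  */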
2068 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2069 {
2070 replacement = find_fixup_replacement (replacements, var);
2071 if (replacement->new == 0)
2072 replacement->new = gen_reg_rtx (GET_MODE (var));
2073 SUBREG_REG (x) = replacement->new;
2074 return;
2075 }
2076
2077 /* See if we have already found a replacement for this SUBREG.
2078 If so, use it. Otherwise, make a MEM and see if the insn
2079 is recognized. If not, or if we should force MEM into a register,
2080 make a pseudo for this SUBREG. */
2081 replacement = find_fixup_replacement (replacements, x);
2082 if (replacement->new)
2083 {
2084 *loc = replacement->new;
2085 return;
2086 }
2087
2088 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2089
2090 INSN_CODE (insn) = -1;
2091 if (! flag_force_mem && recog_memoized (insn) >= 0)
2092 return;
2093
2094 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2095 return;
2096 }
2097 break;
2098
2099 case SET:
2100 /* First do special simplification of bit-field references. */
2101 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2102 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2103 optimize_bit_field (x, insn, 0);
2104 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2105 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2106 optimize_bit_field (x, insn, NULL_PTR);
2107
2108 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2109 into a register and then store it back out. */
2110 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2111 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2112 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2113 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2114 > GET_MODE_SIZE (GET_MODE (var))))
2115 {
2116 replacement = find_fixup_replacement (replacements, var);
2117 if (replacement->new == 0)
2118 replacement->new = gen_reg_rtx (GET_MODE (var));
2119
2120 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2121 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2122 }
2123
2124 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2125 insn into a pseudo and store the low part of the pseudo into VAR. */
2126 if (GET_CODE (SET_DEST (x)) == SUBREG
2127 && SUBREG_REG (SET_DEST (x)) == var
2128 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2129 > GET_MODE_SIZE (GET_MODE (var))))
2130 {
2131 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2132 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2133 tem)),
2134 insn);
2135 break;
2136 }
2137
2138 {
2139 rtx dest = SET_DEST (x);
2140 rtx src = SET_SRC (x);
2141 #ifdef HAVE_insv
2142 rtx outerdest = dest;
2143 #endif
2144
2145 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2146 || GET_CODE (dest) == SIGN_EXTRACT
2147 || GET_CODE (dest) == ZERO_EXTRACT)
2148 dest = XEXP (dest, 0);
2149
2150 if (GET_CODE (src) == SUBREG)
2151 src = XEXP (src, 0);
2152
2153         /* If VAR does not appear at the top level of the SET,
2154            just scan the lower levels of the tree.  */
2155
2156 if (src != var && dest != var)
2157 break;
2158
2159 /* We will need to rerecognize this insn. */
2160 INSN_CODE (insn) = -1;
2161
2162 #ifdef HAVE_insv
2163 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2164 {
2165             /* Since this case will return, ensure we fix up all the
2166                operands here.  */
2167 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2168 insn, replacements);
2169 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2170 insn, replacements);
2171 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2172 insn, replacements);
2173
2174 tem = XEXP (outerdest, 0);
2175
2176 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2177 that may appear inside a ZERO_EXTRACT.
2178 This was legitimate when the MEM was a REG. */
2179 if (GET_CODE (tem) == SUBREG
2180 && SUBREG_REG (tem) == var)
2181 tem = fixup_memory_subreg (tem, insn, 0);
2182 else
2183 tem = fixup_stack_1 (tem, insn);
2184
2185 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2186 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2187 && ! mode_dependent_address_p (XEXP (tem, 0))
2188 && ! MEM_VOLATILE_P (tem))
2189 {
2190 enum machine_mode wanted_mode;
2191 enum machine_mode is_mode = GET_MODE (tem);
2192 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2193
2194 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2195 if (wanted_mode == VOIDmode)
2196 wanted_mode = word_mode;
2197
2198 /* If we have a narrower mode, we can do something. */
2199 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2200 {
2201 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2202 rtx old_pos = XEXP (outerdest, 2);
2203 rtx newmem;
2204
2205 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2206 offset = (GET_MODE_SIZE (is_mode)
2207 - GET_MODE_SIZE (wanted_mode) - offset);
2208
2209 pos %= GET_MODE_BITSIZE (wanted_mode);
2210
2211 newmem = gen_rtx_MEM (wanted_mode,
2212 plus_constant (XEXP (tem, 0), offset));
2213 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2214 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2215 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2216
2217 /* Make the change and see if the insn remains valid. */
2218 INSN_CODE (insn) = -1;
2219 XEXP (outerdest, 0) = newmem;
2220 XEXP (outerdest, 2) = GEN_INT (pos);
2221
2222 if (recog_memoized (insn) >= 0)
2223 return;
2224
2225                 /* Otherwise, restore old position.  XEXP (outerdest, 0)
2226                    will be restored later.  */
2227 XEXP (outerdest, 2) = old_pos;
2228 }
2229 }
2230
2231 /* If we get here, the bit-field store doesn't allow memory
2232 or isn't located at a constant position. Load the value into
2233 a register, do the store, and put it back into memory. */
2234
2235 tem1 = gen_reg_rtx (GET_MODE (tem));
2236 emit_insn_before (gen_move_insn (tem1, tem), insn);
2237 emit_insn_after (gen_move_insn (tem, tem1), insn);
2238 XEXP (outerdest, 0) = tem1;
2239 return;
2240 }
2241 #endif
2242
2243 /* STRICT_LOW_PART is a no-op on memory references
2244 and it can cause combinations to be unrecognizable,
2245 so eliminate it. */
2246
2247 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2248 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2249
2250 /* A valid insn to copy VAR into or out of a register
2251 must be left alone, to avoid an infinite loop here.
2252 If the reference to VAR is by a subreg, fix that up,
2253 since SUBREG is not valid for a memref.
2254 Also fix up the address of the stack slot.
2255
2256 Note that we must not try to recognize the insn until
2257 after we know that we have valid addresses and no
2258 (subreg (mem ...) ...) constructs, since these interfere
2259 with determining the validity of the insn. */
2260
2261 if ((SET_SRC (x) == var
2262 || (GET_CODE (SET_SRC (x)) == SUBREG
2263 && SUBREG_REG (SET_SRC (x)) == var))
2264 && (GET_CODE (SET_DEST (x)) == REG
2265 || (GET_CODE (SET_DEST (x)) == SUBREG
2266 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2267 && GET_MODE (var) == promoted_mode
2268 && x == single_set (insn))
2269 {
2270 rtx pat;
2271
2272 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2273 if (replacement->new)
2274 SET_SRC (x) = replacement->new;
2275 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2276 SET_SRC (x) = replacement->new
2277 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2278 else
2279 SET_SRC (x) = replacement->new
2280 = fixup_stack_1 (SET_SRC (x), insn);
2281
2282 if (recog_memoized (insn) >= 0)
2283 return;
2284
2285 /* INSN is not valid, but we know that we want to
2286 copy SET_SRC (x) to SET_DEST (x) in some way. So
2287 we generate the move and see whether it requires more
2288 than one insn. If it does, we emit those insns and
2289                delete INSN.  Otherwise, we can just replace the pattern
2290                of INSN; we have already verified above that INSN has
2291                no function other than to do X.  */
2292
2293 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2294 if (GET_CODE (pat) == SEQUENCE)
2295 {
2296 emit_insn_after (pat, insn);
2297 PUT_CODE (insn, NOTE);
2298 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2299 NOTE_SOURCE_FILE (insn) = 0;
2300 }
2301 else
2302 PATTERN (insn) = pat;
2303
2304 return;
2305 }
2306
2307 if ((SET_DEST (x) == var
2308 || (GET_CODE (SET_DEST (x)) == SUBREG
2309 && SUBREG_REG (SET_DEST (x)) == var))
2310 && (GET_CODE (SET_SRC (x)) == REG
2311 || (GET_CODE (SET_SRC (x)) == SUBREG
2312 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2313 && GET_MODE (var) == promoted_mode
2314 && x == single_set (insn))
2315 {
2316 rtx pat;
2317
2318 if (GET_CODE (SET_DEST (x)) == SUBREG)
2319 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2320 else
2321 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2322
2323 if (recog_memoized (insn) >= 0)
2324 return;
2325
2326 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2327 if (GET_CODE (pat) == SEQUENCE)
2328 {
2329 emit_insn_after (pat, insn);
2330 PUT_CODE (insn, NOTE);
2331 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2332 NOTE_SOURCE_FILE (insn) = 0;
2333 }
2334 else
2335 PATTERN (insn) = pat;
2336
2337 return;
2338 }
2339
2340 /* Otherwise, storing into VAR must be handled specially
2341 by storing into a temporary and copying that into VAR
2342 with a new insn after this one. Note that this case
2343 will be used when storing into a promoted scalar since
2344 the insn will now have different modes on the input
2345 and output and hence will be invalid (except for the case
2346 of setting it to a constant, which does not need any
2347 change if it is valid). We generate extra code in that case,
2348 but combine.c will eliminate it. */
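          /* Sketch with made-up operands (lowpart shown for a little-endian
             target): if VAR was promoted to SImode but now lives in memory
             as (mem:HI (plus FP -2)), then for (set VAR (plus:SI ...)) we
             redirect the SET to a fresh SImode pseudo and emit, after this
             insn, (set (mem:HI (plus FP -2)) (subreg:HI (reg:SI 100) 0)).  */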
2349
2350 if (dest == var)
2351 {
2352 rtx temp;
2353 rtx fixeddest = SET_DEST (x);
2354
2355             /* STRICT_LOW_PART can be discarded around a MEM.  */
2356 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2357 fixeddest = XEXP (fixeddest, 0);
2358 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2359 if (GET_CODE (fixeddest) == SUBREG)
2360 {
2361 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2362 promoted_mode = GET_MODE (fixeddest);
2363 }
2364 else
2365 fixeddest = fixup_stack_1 (fixeddest, insn);
2366
2367 temp = gen_reg_rtx (promoted_mode);
2368
2369 emit_insn_after (gen_move_insn (fixeddest,
2370 gen_lowpart (GET_MODE (fixeddest),
2371 temp)),
2372 insn);
2373
2374 SET_DEST (x) = temp;
2375 }
2376 }
2377
2378 default:
2379 break;
2380 }
2381
2382 /* Nothing special about this RTX; fix its operands. */
2383
2384 fmt = GET_RTX_FORMAT (code);
2385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2386 {
2387 if (fmt[i] == 'e')
2388 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2389 if (fmt[i] == 'E')
2390 {
2391 register int j;
2392 for (j = 0; j < XVECLEN (x, i); j++)
2393 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2394 insn, replacements);
2395 }
2396 }
2397 }
2398 \f
2399 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2400 return an rtx (MEM:m1 newaddr) which is equivalent.
2401 If any insns must be emitted to compute NEWADDR, put them before INSN.
2402
2403 UNCRITICAL nonzero means accept paradoxical subregs.
2404 This is used for subregs found inside REG_NOTES. */
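/* As a worked example (word size assumed 4 bytes): on a big-endian target,
   (subreg:QI (mem:SI addr) 0) yields
   offset = 0 + (MIN (4, 4) - MIN (4, 1)) = 3, so the result is
   (mem:QI (plus addr 3)), the low-order byte of the word; on a
   little-endian target the offset stays 0.  */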
2405
2406 static rtx
2407 fixup_memory_subreg (x, insn, uncritical)
2408 rtx x;
2409 rtx insn;
2410 int uncritical;
2411 {
2412 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2413 rtx addr = XEXP (SUBREG_REG (x), 0);
2414 enum machine_mode mode = GET_MODE (x);
2415 rtx result;
2416
2417 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2418 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2419 && ! uncritical)
2420 abort ();
2421
2422 if (BYTES_BIG_ENDIAN)
2423 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2424 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2425 addr = plus_constant (addr, offset);
2426 if (!flag_force_addr && memory_address_p (mode, addr))
2427 /* Shortcut if no insns need be emitted. */
2428 return change_address (SUBREG_REG (x), mode, addr);
2429 start_sequence ();
2430 result = change_address (SUBREG_REG (x), mode, addr);
2431 emit_insn_before (gen_sequence (), insn);
2432 end_sequence ();
2433 return result;
2434 }
2435
2436 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2437 Replace subexpressions of X in place.
2438 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2439 Otherwise return X, with its contents possibly altered.
2440
2441 If any insns must be emitted to compute NEWADDR, put them before INSN.
2442
2443 UNCRITICAL is as in fixup_memory_subreg. */
2444
2445 static rtx
2446 walk_fixup_memory_subreg (x, insn, uncritical)
2447 register rtx x;
2448 rtx insn;
2449 int uncritical;
2450 {
2451 register enum rtx_code code;
2452 register char *fmt;
2453 register int i;
2454
2455 if (x == 0)
2456 return 0;
2457
2458 code = GET_CODE (x);
2459
2460 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2461 return fixup_memory_subreg (x, insn, uncritical);
2462
2463 /* Nothing special about this RTX; fix its operands. */
2464
2465 fmt = GET_RTX_FORMAT (code);
2466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2467 {
2468 if (fmt[i] == 'e')
2469 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2470 if (fmt[i] == 'E')
2471 {
2472 register int j;
2473 for (j = 0; j < XVECLEN (x, i); j++)
2474 XVECEXP (x, i, j)
2475 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2476 }
2477 }
2478 return x;
2479 }
2480 \f
2481 /* For each memory ref within X, if it refers to a stack slot
2482 with an out of range displacement, put the address in a temp register
2483 (emitting new insns before INSN to load these registers)
2484 and alter the memory ref to use that register.
2485 Replace each such MEM rtx with a copy, to avoid clobberage. */
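/* For instance (an illustrative sketch): if INSN contains
   (mem:SI (plus (reg fp) (const_int 40000))) and the machine cannot encode
   so large a displacement, we emit a copy of the address into a temporary
   register before INSN and replace the reference with (mem:SI (reg temp)).  */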
2486
2487 static rtx
2488 fixup_stack_1 (x, insn)
2489 rtx x;
2490 rtx insn;
2491 {
2492 register int i;
2493 register RTX_CODE code = GET_CODE (x);
2494 register char *fmt;
2495
2496 if (code == MEM)
2497 {
2498 register rtx ad = XEXP (x, 0);
2499       /* If we have the address of a stack slot but it's not valid
2500          (the displacement is too large), compute the sum in a register.  */
2501 if (GET_CODE (ad) == PLUS
2502 && GET_CODE (XEXP (ad, 0)) == REG
2503 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2504 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2505 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2506 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2507 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2508 #endif
2509 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2510 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2511 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2512 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2513 {
2514 rtx temp, seq;
2515 if (memory_address_p (GET_MODE (x), ad))
2516 return x;
2517
2518 start_sequence ();
2519 temp = copy_to_reg (ad);
2520 seq = gen_sequence ();
2521 end_sequence ();
2522 emit_insn_before (seq, insn);
2523 return change_address (x, VOIDmode, temp);
2524 }
2525 return x;
2526 }
2527
2528 fmt = GET_RTX_FORMAT (code);
2529 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2530 {
2531 if (fmt[i] == 'e')
2532 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2533 if (fmt[i] == 'E')
2534 {
2535 register int j;
2536 for (j = 0; j < XVECLEN (x, i); j++)
2537 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2538 }
2539 }
2540 return x;
2541 }
2542 \f
2543 /* Optimization: a bit-field instruction whose field
2544 happens to be a byte or halfword in memory
2545 can be changed to a move instruction.
2546
2547 We call here when INSN is an insn to examine or store into a bit-field.
2548 BODY is the SET-rtx to be altered.
2549
2550 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2551 (Currently this is called only from function.c, and EQUIV_MEM
2552 is always 0.) */
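/* For example (a sketch; little-endian byte and bit order assumed):

       (set (zero_extract:SI (mem:SI (reg A)) (const_int 8) (const_int 8))
            (reg:SI B))

   stores an aligned byte, so it can be rewritten as the plain move

       (set (mem:QI (plus (reg A) (const_int 1))) (subreg:QI (reg:SI B) 0))

   with no bit-field instruction needed.  */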
2553
2554 static void
2555 optimize_bit_field (body, insn, equiv_mem)
2556 rtx body;
2557 rtx insn;
2558 rtx *equiv_mem;
2559 {
2560 register rtx bitfield;
2561 int destflag;
2562 rtx seq = 0;
2563 enum machine_mode mode;
2564
2565 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2566 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2567 bitfield = SET_DEST (body), destflag = 1;
2568 else
2569 bitfield = SET_SRC (body), destflag = 0;
2570
2571 /* First check that the field being stored has constant size and position
2572 and is in fact a byte or halfword suitably aligned. */
2573
2574 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2575 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2576 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2577 != BLKmode)
2578 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2579 {
2580 register rtx memref = 0;
2581
2582 /* Now check that the containing word is memory, not a register,
2583 and that it is safe to change the machine mode. */
2584
2585 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2586 memref = XEXP (bitfield, 0);
2587 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2588 && equiv_mem != 0)
2589 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2590 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2591 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2592 memref = SUBREG_REG (XEXP (bitfield, 0));
2593 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2594 && equiv_mem != 0
2595 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2596 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2597
2598 if (memref
2599 && ! mode_dependent_address_p (XEXP (memref, 0))
2600 && ! MEM_VOLATILE_P (memref))
2601 {
2602 /* Now adjust the address, first for any subreg'ing
2603 that we are now getting rid of,
2604 and then for which byte of the word is wanted. */
2605
2606 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2607 rtx insns;
2608
2609 /* Adjust OFFSET to count bits from low-address byte. */
2610 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2611 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2612 - offset - INTVAL (XEXP (bitfield, 1)));
2613
2614 /* Adjust OFFSET to count bytes from low-address byte. */
2615 offset /= BITS_PER_UNIT;
2616 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2617 {
2618 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2619 if (BYTES_BIG_ENDIAN)
2620 offset -= (MIN (UNITS_PER_WORD,
2621 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2622 - MIN (UNITS_PER_WORD,
2623 GET_MODE_SIZE (GET_MODE (memref))));
2624 }
2625
2626 start_sequence ();
2627 memref = change_address (memref, mode,
2628 plus_constant (XEXP (memref, 0), offset));
2629 insns = get_insns ();
2630 end_sequence ();
2631 emit_insns_before (insns, insn);
2632
2633 /* Store this memory reference where
2634 we found the bit field reference. */
2635
2636 if (destflag)
2637 {
2638 validate_change (insn, &SET_DEST (body), memref, 1);
2639 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2640 {
2641 rtx src = SET_SRC (body);
2642 while (GET_CODE (src) == SUBREG
2643 && SUBREG_WORD (src) == 0)
2644 src = SUBREG_REG (src);
2645 if (GET_MODE (src) != GET_MODE (memref))
2646 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2647 validate_change (insn, &SET_SRC (body), src, 1);
2648 }
2649 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2650 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2651             /* This shouldn't happen because anything that didn't have
2652                one of these modes should have been converted explicitly
2653 and then referenced through a subreg.
2654 This is so because the original bit-field was
2655 handled by agg_mode and so its tree structure had
2656 the same mode that memref now has. */
2657 abort ();
2658 }
2659 else
2660 {
2661 rtx dest = SET_DEST (body);
2662
2663 while (GET_CODE (dest) == SUBREG
2664 && SUBREG_WORD (dest) == 0
2665 && (GET_MODE_CLASS (GET_MODE (dest))
2666 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2667 dest = SUBREG_REG (dest);
2668
2669 validate_change (insn, &SET_DEST (body), dest, 1);
2670
2671 if (GET_MODE (dest) == GET_MODE (memref))
2672 validate_change (insn, &SET_SRC (body), memref, 1);
2673 else
2674 {
2675 /* Convert the mem ref to the destination mode. */
2676 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2677
2678 start_sequence ();
2679 convert_move (newreg, memref,
2680 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2681 seq = get_insns ();
2682 end_sequence ();
2683
2684 validate_change (insn, &SET_SRC (body), newreg, 1);
2685 }
2686 }
2687
2688 /* See if we can convert this extraction or insertion into
2689 a simple move insn. We might not be able to do so if this
2690 was, for example, part of a PARALLEL.
2691
2692 If we succeed, write out any needed conversions. If we fail,
2693 it is hard to guess why we failed, so don't do anything
2694 special; just let the optimization be suppressed. */
2695
2696 if (apply_change_group () && seq)
2697 emit_insns_before (seq, insn);
2698 }
2699 }
2700 }
2701 \f
2702 /* These routines are responsible for converting virtual register references
2703 to the actual hard register references once RTL generation is complete.
2704
2705    The following five variables are used for communication between the
2706 routines. They contain the offsets of the virtual registers from their
2707 respective hard registers. */
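/* For instance (assuming var_offset comes out to 16): an address
   (plus:SI virtual_stack_vars_rtx (const_int 8)) is rewritten as
   (plus:SI frame_pointer_rtx (const_int 24)) once the offsets below
   have been computed for the function.  */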
2708
2709 static int in_arg_offset;
2710 static int var_offset;
2711 static int dynamic_offset;
2712 static int out_arg_offset;
2713 static int cfa_offset;
2714
2715 /* On most machines, the stack pointer register is equivalent to the bottom
2716 of the stack. */
2717
2718 #ifndef STACK_POINTER_OFFSET
2719 #define STACK_POINTER_OFFSET 0
2720 #endif
2721
2722 /* If not defined, pick an appropriate default for the offset of dynamically
2723 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2724 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2725
2726 #ifndef STACK_DYNAMIC_OFFSET
2727
2728 #ifdef ACCUMULATE_OUTGOING_ARGS
2729 /* The bottom of the stack points to the actual arguments. If
2730 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2731    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2732 stack space for register parameters is not pushed by the caller, but
2733 rather part of the fixed stack areas and hence not included in
2734 `current_function_outgoing_args_size'. Nevertheless, we must allow
2735 for it when allocating stack dynamic objects. */
2736
2737 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2738 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2739 (current_function_outgoing_args_size \
2740 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2741
2742 #else
2743 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2744 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2745 #endif
2746
2747 #else
2748 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2749 #endif
2750 #endif
2751
2752 /* On a few machines, the CFA coincides with the arg pointer. */
2753
2754 #ifndef ARG_POINTER_CFA_OFFSET
2755 #define ARG_POINTER_CFA_OFFSET 0
2756 #endif
2757
2758
2759 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2760 its address taken. DECL is the decl for the object stored in the
2761 register, for later use if we do need to force REG into the stack.
2762    REG is overwritten by the MEM, as in put_reg_into_stack.  */
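/* E.g. (hypothetical register numbers, with Pmode taken as SImode): taking
   the address of a variable held in (reg:SI 42) rewrites that rtx in place
   into (mem:SI (addressof:SI (reg:SI 100) 42)), where 100 is a fresh pseudo
   and 42 records the original register number for
   put_addressof_into_stack.  */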
2763
2764 rtx
2765 gen_mem_addressof (reg, decl)
2766 rtx reg;
2767 tree decl;
2768 {
2769 tree type = TREE_TYPE (decl);
2770 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2771 SET_ADDRESSOF_DECL (r, decl);
2772 /* If the original REG was a user-variable, then so is the REG whose
2773 address is being taken. */
2774 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2775
2776 XEXP (reg, 0) = r;
2777 PUT_CODE (reg, MEM);
2778 PUT_MODE (reg, DECL_MODE (decl));
2779 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2780 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2781 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2782
2783 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2784 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2785
2786 return reg;
2787 }
2788
2789 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2790
2791 void
2792 flush_addressof (decl)
2793 tree decl;
2794 {
2795 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2796 && DECL_RTL (decl) != 0
2797 && GET_CODE (DECL_RTL (decl)) == MEM
2798 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2799 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2800 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2801 }
2802
2803 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2804
2805 static void
2806 put_addressof_into_stack (r)
2807 rtx r;
2808 {
2809 tree decl = ADDRESSOF_DECL (r);
2810 rtx reg = XEXP (r, 0);
2811
2812 if (GET_CODE (reg) != REG)
2813 abort ();
2814
2815 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2816 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2817 ADDRESSOF_REGNO (r),
2818 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2819 }
2820
2821 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2822 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2823 the stack. */
2824
2825 static void
2826 purge_addressof_1 (loc, insn, force, store)
2827 rtx *loc;
2828 rtx insn;
2829 int force, store;
2830 {
2831 rtx x;
2832 RTX_CODE code;
2833 int i, j;
2834 char *fmt;
2835
2836 /* Re-start here to avoid recursion in common cases. */
2837 restart:
2838
2839 x = *loc;
2840 if (x == 0)
2841 return;
2842
2843 code = GET_CODE (x);
2844
2845 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2846 {
2847 rtx insns;
2848 /* We must create a copy of the rtx because it was created by
2849 overwriting a REG rtx which is always shared. */
2850 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2851
2852 if (validate_change (insn, loc, sub, 0))
2853 return;
2854
2855 start_sequence ();
2856 if (! validate_change (insn, loc,
2857 force_operand (sub, NULL_RTX),
2858 0))
2859 abort ();
2860
2861 insns = gen_sequence ();
2862 end_sequence ();
2863 emit_insns_before (insns, insn);
2864 return;
2865 }
2866 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2867 {
2868 rtx sub = XEXP (XEXP (x, 0), 0);
2869
2870 if (GET_CODE (sub) == MEM)
2871 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2872
2873 if (GET_CODE (sub) == REG
2874 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2875 {
2876 put_addressof_into_stack (XEXP (x, 0));
2877 return;
2878 }
2879 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2880 {
2881 int size_x, size_sub;
2882
2883 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2884 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2885
2886 /* Don't even consider working with paradoxical subregs,
2887 or the moral equivalent seen here. */
2888 if (size_x < size_sub
2889 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2890 {
2891 /* Do a bitfield insertion to mirror what would happen
2892 in memory. */
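                  /* Sketch (illustrative modes): for a store through
                     (mem:SI (addressof (reg:DI 100))), we use
                     store_bit_field to deposit the SImode value into a
                     32-bit field of the DImode pseudo, just as a narrow
                     store would have modified only part of a stack slot.  */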
2893
2894 rtx val, seq;
2895
2896 if (store)
2897 {
2898 /* If we can't replace with a register, be afraid. */
2899
2900 start_sequence ();
2901 val = gen_reg_rtx (GET_MODE (x));
2902 if (! validate_change (insn, loc, val, 0))
2903 abort ();
2904 seq = gen_sequence ();
2905 end_sequence ();
2906 emit_insn_before (seq, insn);
2907
2908 start_sequence ();
2909 store_bit_field (sub, size_x, 0, GET_MODE (x),
2910 val, GET_MODE_SIZE (GET_MODE (sub)),
2911 GET_MODE_SIZE (GET_MODE (sub)));
2912
2913 seq = gen_sequence ();
2914 end_sequence ();
2915 emit_insn_after (seq, insn);
2916 }
2917 else
2918 {
2919 start_sequence ();
2920 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2921 GET_MODE (x), GET_MODE (x),
2922 GET_MODE_SIZE (GET_MODE (sub)),
2923 GET_MODE_SIZE (GET_MODE (sub)));
2924
2925 /* If we can't replace with a register, be afraid. */
2926 if (! validate_change (insn, loc, val, 0))
2927 abort ();
2928
2929 seq = gen_sequence ();
2930 end_sequence ();
2931 emit_insn_before (seq, insn);
2932 }
2933
2934 /* We replaced with a reg -- all done. */
2935 return;
2936 }
2937 }
2938 else if (validate_change (insn, loc, sub, 0))
2939 goto restart;
2940       /* Else give up and put it into the stack.  */
2941 }
2942 else if (code == ADDRESSOF)
2943 {
2944 put_addressof_into_stack (x);
2945 return;
2946 }
2947 else if (code == SET)
2948 {
2949 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
2950 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
2951 return;
2952 }
2953
2954 /* Scan all subexpressions. */
2955 fmt = GET_RTX_FORMAT (code);
2956 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2957 {
2958 if (*fmt == 'e')
2959 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
2960 else if (*fmt == 'E')
2961 for (j = 0; j < XVECLEN (x, i); j++)
2962 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
2963 }
2964 }
2965
2966 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2967 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2968 stack. */
2969
2970 void
2971 purge_addressof (insns)
2972 rtx insns;
2973 {
2974 rtx insn;
2975 for (insn = insns; insn; insn = NEXT_INSN (insn))
2976 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2977 || GET_CODE (insn) == CALL_INSN)
2978 {
2979 purge_addressof_1 (&PATTERN (insn), insn,
2980 asm_noperands (PATTERN (insn)) > 0, 0);
2981 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
2982 }
2983 }
2984 \f
2985 /* Pass through the INSNS of function FNDECL and convert virtual register
2986 references to hard register references. */
2987
2988 void
2989 instantiate_virtual_regs (fndecl, insns)
2990 tree fndecl;
2991 rtx insns;
2992 {
2993 rtx insn;
2994 int i;
2995
2996 /* Compute the offsets to use for this function. */
2997 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2998 var_offset = STARTING_FRAME_OFFSET;
2999 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3000 out_arg_offset = STACK_POINTER_OFFSET;
3001 cfa_offset = ARG_POINTER_CFA_OFFSET;
3002
3003 /* Scan all variables and parameters of this function. For each that is
3004 in memory, instantiate all virtual registers if the result is a valid
3005 address. If not, we do it later. That will handle most uses of virtual
3006 regs on many machines. */
3007 instantiate_decls (fndecl, 1);
3008
3009 /* Initialize recognition, indicating that volatile is OK. */
3010 init_recog ();
3011
3012 /* Scan through all the insns, instantiating every virtual register still
3013 present. */
3014 for (insn = insns; insn; insn = NEXT_INSN (insn))
3015 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3016 || GET_CODE (insn) == CALL_INSN)
3017 {
3018 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3019 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3020 }
3021
3022 /* Instantiate the stack slots for the parm registers, for later use in
3023 addressof elimination. */
3024 for (i = 0; i < max_parm_reg; ++i)
3025 if (parm_reg_stack_loc[i])
3026 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3027
3028 /* Now instantiate the remaining register equivalences for debugging info.
3029 These will not be valid addresses. */
3030 instantiate_decls (fndecl, 0);
3031
3032 /* Indicate that, from now on, assign_stack_local should use
3033 frame_pointer_rtx. */
3034 virtuals_instantiated = 1;
3035 }
3036
3037 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3038 all virtual registers in their DECL_RTL's.
3039
3040 If VALID_ONLY, do this only if the resulting address is still valid.
3041 Otherwise, always do it. */
3042
3043 static void
3044 instantiate_decls (fndecl, valid_only)
3045 tree fndecl;
3046 int valid_only;
3047 {
3048 tree decl;
3049
3050 if (DECL_SAVED_INSNS (fndecl))
3051 /* When compiling an inline function, the obstack used for
3052 rtl allocation is the maybepermanent_obstack. Calling
3053 `resume_temporary_allocation' switches us back to that
3054 obstack while we process this function's parameters. */
3055 resume_temporary_allocation ();
3056
3057 /* Process all parameters of the function. */
3058 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3059 {
3060 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3061
3062 instantiate_decl (DECL_RTL (decl), size, valid_only);
3063
3064       /* If the parameter was promoted, then the size of the incoming RTL's
3065          mode may be larger than the declared type's size.  We must use the
3066          larger of the two sizes.  */
3067 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3068 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3069 }
3070
3071 /* Now process all variables defined in the function or its subblocks. */
3072 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3073
3074 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3075 {
3076 /* Save all rtl allocated for this function by raising the
3077 high-water mark on the maybepermanent_obstack. */
3078 preserve_data ();
3079 /* All further rtl allocation is now done in the current_obstack. */
3080 rtl_in_current_obstack ();
3081 }
3082 }
3083
3084 /* Subroutine of instantiate_decls: Process all decls in the given
3085 BLOCK node and all its subblocks. */
3086
3087 static void
3088 instantiate_decls_1 (let, valid_only)
3089 tree let;
3090 int valid_only;
3091 {
3092 tree t;
3093
3094 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3095 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3096 valid_only);
3097
3098 /* Process all subblocks. */
3099 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3100 instantiate_decls_1 (t, valid_only);
3101 }
3102
3103 /* Subroutine of the preceding procedures: Given RTL representing a
3104 decl and the size of the object, do any instantiation required.
3105
3106 If VALID_ONLY is non-zero, it means that the RTL should only be
3107 changed if the new address is valid. */
3108
3109 static void
3110 instantiate_decl (x, size, valid_only)
3111 rtx x;
3112 int size;
3113 int valid_only;
3114 {
3115 enum machine_mode mode;
3116 rtx addr;
3117
3118 /* If this is not a MEM, no need to do anything. Similarly if the
3119 address is a constant or a register that is not a virtual register. */
3120
3121 if (x == 0 || GET_CODE (x) != MEM)
3122 return;
3123
3124 addr = XEXP (x, 0);
3125 if (CONSTANT_P (addr)
3126 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3127 || (GET_CODE (addr) == REG
3128 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3129 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3130 return;
3131
3132   /* If we are to do this only when the address is valid, copy the address.
3133 We need to do this so we can undo any changes that might make the
3134 address invalid. This copy is unfortunate, but probably can't be
3135 avoided. */
3136
3137 if (valid_only)
3138 addr = copy_rtx (addr);
3139
3140 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3141
3142 if (valid_only)
3143 {
3144 /* Now verify that the resulting address is valid for every integer or
3145 floating-point mode up to and including SIZE bytes long. We do this
3146 since the object might be accessed in any mode and frame addresses
3147 are shared. */
3148
3149 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3150 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3151 mode = GET_MODE_WIDER_MODE (mode))
3152 if (! memory_address_p (mode, addr))
3153 return;
3154
3155 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3156 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3157 mode = GET_MODE_WIDER_MODE (mode))
3158 if (! memory_address_p (mode, addr))
3159 return;
3160 }
3161
3162 /* Put back the address now that we have updated it and we either know
3163 it is valid or we don't care whether it is valid. */
3164
3165 XEXP (x, 0) = addr;
3166 }
3167 \f
3168 /* Given a pointer to a piece of rtx and an optional pointer to the
3169 containing object, instantiate any virtual registers present in it.
3170
3171 If EXTRA_INSNS, we always do the replacement and generate
3172    any extra insns before OBJECT.  If it is zero, we do nothing if the
3173    replacement is not valid.
3174
3175 Return 1 if we either had nothing to do or if we were able to do the
3176 needed replacement. Return 0 otherwise; we only return zero if
3177 EXTRA_INSNS is zero.
3178
3179 We first try some simple transformations to avoid the creation of extra
3180 pseudos. */
3181
3182 static int
3183 instantiate_virtual_regs_1 (loc, object, extra_insns)
3184 rtx *loc;
3185 rtx object;
3186 int extra_insns;
3187 {
3188 rtx x;
3189 RTX_CODE code;
3190 rtx new = 0;
3191 HOST_WIDE_INT offset;
3192 rtx temp;
3193 rtx seq;
3194 int i, j;
3195 char *fmt;
3196
3197 /* Re-start here to avoid recursion in common cases. */
3198 restart:
3199
3200 x = *loc;
3201 if (x == 0)
3202 return 1;
3203
3204 code = GET_CODE (x);
3205
3206 /* Check for some special cases. */
3207 switch (code)
3208 {
3209 case CONST_INT:
3210 case CONST_DOUBLE:
3211 case CONST:
3212 case SYMBOL_REF:
3213 case CODE_LABEL:
3214 case PC:
3215 case CC0:
3216 case ASM_INPUT:
3217 case ADDR_VEC:
3218 case ADDR_DIFF_VEC:
3219 case RETURN:
3220 return 1;
3221
3222 case SET:
3223 /* We are allowed to set the virtual registers. This means that
3224 the actual register should receive the source minus the
3225 appropriate offset. This is used, for example, in the handling
3226 of non-local gotos. */
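      /* For example (assuming var_offset is 16):
         (set virtual_stack_vars_rtx (reg:SI 42)) becomes
         (set frame_pointer_rtx (plus:SI (reg:SI 42) (const_int -16))),
         with the addition computed by force_operand in insns emitted
         before OBJECT.  */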
3227 if (SET_DEST (x) == virtual_incoming_args_rtx)
3228 new = arg_pointer_rtx, offset = - in_arg_offset;
3229 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3230 new = frame_pointer_rtx, offset = - var_offset;
3231 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3232 new = stack_pointer_rtx, offset = - dynamic_offset;
3233 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3234 new = stack_pointer_rtx, offset = - out_arg_offset;
3235 else if (SET_DEST (x) == virtual_cfa_rtx)
3236 new = arg_pointer_rtx, offset = - cfa_offset;
3237
3238 if (new)
3239 {
3240 /* The only valid sources here are PLUS or REG. Just do
3241 the simplest possible thing to handle them. */
3242 if (GET_CODE (SET_SRC (x)) != REG
3243 && GET_CODE (SET_SRC (x)) != PLUS)
3244 abort ();
3245
3246 start_sequence ();
3247 if (GET_CODE (SET_SRC (x)) != REG)
3248 temp = force_operand (SET_SRC (x), NULL_RTX);
3249 else
3250 temp = SET_SRC (x);
3251 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3252 seq = get_insns ();
3253 end_sequence ();
3254
3255 emit_insns_before (seq, object);
3256 SET_DEST (x) = new;
3257
3258 if (! validate_change (object, &SET_SRC (x), temp, 0)
3259 || ! extra_insns)
3260 abort ();
3261
3262 return 1;
3263 }
3264
3265 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3266 loc = &SET_SRC (x);
3267 goto restart;
3268
3269 case PLUS:
3270 /* Handle special case of virtual register plus constant. */
3271 if (CONSTANT_P (XEXP (x, 1)))
3272 {
3273 rtx old, new_offset;
3274
3275 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3276 if (GET_CODE (XEXP (x, 0)) == PLUS)
3277 {
3278 rtx inner = XEXP (XEXP (x, 0), 0);
3279
3280 if (inner == virtual_incoming_args_rtx)
3281 new = arg_pointer_rtx, offset = in_arg_offset;
3282 else if (inner == virtual_stack_vars_rtx)
3283 new = frame_pointer_rtx, offset = var_offset;
3284 else if (inner == virtual_stack_dynamic_rtx)
3285 new = stack_pointer_rtx, offset = dynamic_offset;
3286 else if (inner == virtual_outgoing_args_rtx)
3287 new = stack_pointer_rtx, offset = out_arg_offset;
3288 else if (inner == virtual_cfa_rtx)
3289 new = arg_pointer_rtx, offset = cfa_offset;
3290 else
3291 {
3292 loc = &XEXP (x, 0);
3293 goto restart;
3294 }
3295
3296 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3297 extra_insns);
3298 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3299 }
3300
3301 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3302 new = arg_pointer_rtx, offset = in_arg_offset;
3303 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3304 new = frame_pointer_rtx, offset = var_offset;
3305 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3306 new = stack_pointer_rtx, offset = dynamic_offset;
3307 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3308 new = stack_pointer_rtx, offset = out_arg_offset;
3309 else if (XEXP (x, 0) == virtual_cfa_rtx)
3310 new = arg_pointer_rtx, offset = cfa_offset;
3311 else
3312 {
3313           /* We know the second operand is a constant.  Unless the
3314              first operand is a REG (which has already been checked),
3315 it needs to be checked. */
3316 if (GET_CODE (XEXP (x, 0)) != REG)
3317 {
3318 loc = &XEXP (x, 0);
3319 goto restart;
3320 }
3321 return 1;
3322 }
3323
3324 new_offset = plus_constant (XEXP (x, 1), offset);
3325
3326 /* If the new constant is zero, try to replace the sum with just
3327 the register. */
3328 if (new_offset == const0_rtx
3329 && validate_change (object, loc, new, 0))
3330 return 1;
3331
3332 /* Next try to replace the register and new offset.
3333          There are two changes to validate here, and we can't assume that
3334          when the old offset equals the new one just changing the register
3335          will yield a valid insn.  In the interests of a little efficiency,
3336          however, we only call validate_change once (we don't queue up the
3337 changes and then call apply_change_group). */
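      /* E.g. (an assumed var_offset of 16): for
         (plus:SI virtual_stack_vars_rtx (const_int -4)) we retry with
         (plus:SI frame_pointer_rtx (const_int 12)); if that is still not
         valid here, the code below falls back to computing the sum into a
         new pseudo.  */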
3338
3339 old = XEXP (x, 0);
3340 if (offset == 0
3341 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3342 : (XEXP (x, 0) = new,
3343 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3344 {
3345 if (! extra_insns)
3346 {
3347 XEXP (x, 0) = old;
3348 return 0;
3349 }
3350
3351           /* Otherwise copy the new constant into a register and replace
3352              the constant with that register.  */
3353 temp = gen_reg_rtx (Pmode);
3354 XEXP (x, 0) = new;
3355 if (validate_change (object, &XEXP (x, 1), temp, 0))
3356 emit_insn_before (gen_move_insn (temp, new_offset), object);
3357 else
3358 {
3359 /* If that didn't work, replace this expression with a
3360 register containing the sum. */
3361
3362 XEXP (x, 0) = old;
3363 new = gen_rtx_PLUS (Pmode, new, new_offset);
3364
3365 start_sequence ();
3366 temp = force_operand (new, NULL_RTX);
3367 seq = get_insns ();
3368 end_sequence ();
3369
3370 emit_insns_before (seq, object);
3371 if (! validate_change (object, loc, temp, 0)
3372 && ! validate_replace_rtx (x, temp, object))
3373 abort ();
3374 }
3375 }
3376
3377 return 1;
3378 }
3379
3380 /* Fall through to generic two-operand expression case. */
3381 case EXPR_LIST:
3382 case CALL:
3383 case COMPARE:
3384 case MINUS:
3385 case MULT:
3386 case DIV: case UDIV:
3387 case MOD: case UMOD:
3388 case AND: case IOR: case XOR:
3389 case ROTATERT: case ROTATE:
3390 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3391 case NE: case EQ:
3392 case GE: case GT: case GEU: case GTU:
3393 case LE: case LT: case LEU: case LTU:
3394 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3395 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3396 loc = &XEXP (x, 0);
3397 goto restart;
3398
3399 case MEM:
3400 /* Most cases of MEM that convert to valid addresses have already been
3401 handled by our scan of decls. The only special handling we
3402 need here is to make a copy of the rtx to ensure it isn't being
3403 shared if we have to change it to a pseudo.
3404
3405 If the rtx is a simple reference to an address via a virtual register,
3406 it can potentially be shared. In such cases, first try to make it
3407 a valid address, which can also be shared. Otherwise, copy it and
3408 proceed normally.
3409
3410 First check for common cases that need no processing. These are
3411 usually due to instantiation already being done on a previous instance
3412 of a shared rtx. */
3413
3414 temp = XEXP (x, 0);
3415 if (CONSTANT_ADDRESS_P (temp)
3416 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3417 || temp == arg_pointer_rtx
3418 #endif
3419 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3420 || temp == hard_frame_pointer_rtx
3421 #endif
3422 || temp == frame_pointer_rtx)
3423 return 1;
3424
3425 if (GET_CODE (temp) == PLUS
3426 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3427 && (XEXP (temp, 0) == frame_pointer_rtx
3428 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3429 || XEXP (temp, 0) == hard_frame_pointer_rtx
3430 #endif
3431 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3432 || XEXP (temp, 0) == arg_pointer_rtx
3433 #endif
3434 ))
3435 return 1;
3436
3437 if (temp == virtual_stack_vars_rtx
3438 || temp == virtual_incoming_args_rtx
3439 || (GET_CODE (temp) == PLUS
3440 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3441 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3442 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3443 {
3444 /* This MEM may be shared. If the substitution can be done without
3445 the need to generate new pseudos, we want to do it in place
3446 so all copies of the shared rtx benefit. The call below will
3447 only make substitutions if the resulting address is still
3448 valid.
3449
3450 Note that we cannot pass X as the object in the recursive call
3451 since the insn being processed may not allow all valid
3452 addresses. However, if we were not passed an object, we can
3453 only modify X without copying it if X will have a valid
3454 address.
3455
3456 ??? Also note that this can still lose if OBJECT is an insn that
3457 has fewer restrictions on an address than some other insn.
3458 In that case, we will modify the shared address. This case
3459 doesn't seem very likely, though. One case where this could
3460 happen is in the case of a USE or CLOBBER reference, but we
3461 take care of that below. */
3462
3463 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3464 object ? object : x, 0))
3465 return 1;
3466
3467 /* Otherwise make a copy and process that copy. We copy the entire
3468 RTL expression since it might be a PLUS which could also be
3469 shared. */
3470 *loc = x = copy_rtx (x);
3471 }
3472
3473 /* Fall through to generic unary operation case. */
3474 case SUBREG:
3475 case STRICT_LOW_PART:
3476 case NEG: case NOT:
3477 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3478 case SIGN_EXTEND: case ZERO_EXTEND:
3479 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3480 case FLOAT: case FIX:
3481 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3482 case ABS:
3483 case SQRT:
3484 case FFS:
3485 /* These cases either have just one operand or we know that we need not
3486 check the rest of the operands. */
3487 loc = &XEXP (x, 0);
3488 goto restart;
3489
3490 case USE:
3491 case CLOBBER:
3492 /* If the operand is a MEM, see if the change yields a valid MEM. If
3493 not, go ahead and make the invalid change anyway, but do it to a copy.
3494 For a REG, just make the recursive call, since there's no chance of a problem. */
3495
3496 if ((GET_CODE (XEXP (x, 0)) == MEM
3497 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3498 0))
3499 || (GET_CODE (XEXP (x, 0)) == REG
3500 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3501 return 1;
3502
3503 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3504 loc = &XEXP (x, 0);
3505 goto restart;
3506
3507 case REG:
3508 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3509 in front of this insn and substitute the temporary. */
3510 if (x == virtual_incoming_args_rtx)
3511 new = arg_pointer_rtx, offset = in_arg_offset;
3512 else if (x == virtual_stack_vars_rtx)
3513 new = frame_pointer_rtx, offset = var_offset;
3514 else if (x == virtual_stack_dynamic_rtx)
3515 new = stack_pointer_rtx, offset = dynamic_offset;
3516 else if (x == virtual_outgoing_args_rtx)
3517 new = stack_pointer_rtx, offset = out_arg_offset;
3518 else if (x == virtual_cfa_rtx)
3519 new = arg_pointer_rtx, offset = cfa_offset;
3520
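/* A sketch of the substitution (the offset value is hypothetical):
   with var_offset == -16, a reference to (reg virtual_stack_vars)
   becomes (plus (reg frame_pointer) (const_int -16)).  If that form
   is not valid in OBJECT and EXTRA_INSNS is set, the code below
   computes the sum into a fresh pseudo with insns emitted before
   OBJECT. */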
3521 if (new)
3522 {
3523 temp = plus_constant (new, offset);
3524 if (!validate_change (object, loc, temp, 0))
3525 {
3526 if (! extra_insns)
3527 return 0;
3528
3529 start_sequence ();
3530 temp = force_operand (temp, NULL_RTX);
3531 seq = get_insns ();
3532 end_sequence ();
3533
3534 emit_insns_before (seq, object);
3535 if (! validate_change (object, loc, temp, 0)
3536 && ! validate_replace_rtx (x, temp, object))
3537 abort ();
3538 }
3539 }
3540
3541 return 1;
3542
3543 case ADDRESSOF:
3544 if (GET_CODE (XEXP (x, 0)) == REG)
3545 return 1;
3546
3547 else if (GET_CODE (XEXP (x, 0)) == MEM)
3548 {
3549 /* If we have a (addressof (mem ..)), do any instantiation inside
3550 since we know we'll be making the inside valid when we finally
3551 remove the ADDRESSOF. */
3552 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3553 return 1;
3554 }
3555 break;
3556
3557 default:
3558 break;
3559 }
3560
3561 /* Scan all subexpressions. */
3562 fmt = GET_RTX_FORMAT (code);
3563 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3564 if (*fmt == 'e')
3565 {
3566 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3567 return 0;
3568 }
3569 else if (*fmt == 'E')
3570 for (j = 0; j < XVECLEN (x, i); j++)
3571 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3572 extra_insns))
3573 return 0;
3574
3575 return 1;
3576 }
3577 \f
3578 /* Optimization: assuming this function does not receive nonlocal gotos,
3579 delete the handlers for them, as well as the insns that establish
3580 and disestablish them. */
3581
3582 static void
3583 delete_handlers ()
3584 {
3585 rtx insn;
3586 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3587 {
3588 /* Delete the handler by turning off the flag that would
3589 prevent jump_optimize from deleting it.
3590 Also permit deletion of the nonlocal labels themselves
3591 if nothing local refers to them. */
3592 if (GET_CODE (insn) == CODE_LABEL)
3593 {
3594 tree t, last_t;
3595
3596 LABEL_PRESERVE_P (insn) = 0;
3597
3598 /* Remove it from the nonlocal_label list, to avoid confusing
3599 flow. */
3600 for (t = nonlocal_labels, last_t = 0; t;
3601 last_t = t, t = TREE_CHAIN (t))
3602 if (DECL_RTL (TREE_VALUE (t)) == insn)
3603 break;
3604 if (t)
3605 {
3606 if (! last_t)
3607 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3608 else
3609 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3610 }
3611 }
3612 if (GET_CODE (insn) == INSN
3613 && ((nonlocal_goto_handler_slot != 0
3614 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3615 || (nonlocal_goto_stack_level != 0
3616 && reg_mentioned_p (nonlocal_goto_stack_level,
3617 PATTERN (insn)))))
3618 delete_insn (insn);
3619 }
3620 }
3621
3622 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3623 of the current function. */
3624
3625 rtx
3626 nonlocal_label_rtx_list ()
3627 {
3628 tree t;
3629 rtx x = 0;
3630
3631 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3632 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3633
3634 return x;
3635 }
3636 \f
3637 /* Output a USE for any register use in RTL.
3638 This is used with -noreg to mark the extent of the lifespan
3639 of any registers used in a user-visible variable's DECL_RTL. */
3640
3641 void
3642 use_variable (rtl)
3643 rtx rtl;
3644 {
3645 if (GET_CODE (rtl) == REG)
3646 /* This is a register variable. */
3647 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3648 else if (GET_CODE (rtl) == MEM
3649 && GET_CODE (XEXP (rtl, 0)) == REG
3650 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3651 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3652 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3653 /* This is a variable-sized structure. */
3654 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3655 }
3656
3657 /* Like use_variable except that it outputs the USEs after INSN
3658 instead of at the end of the insn-chain. */
3659
3660 void
3661 use_variable_after (rtl, insn)
3662 rtx rtl, insn;
3663 {
3664 if (GET_CODE (rtl) == REG)
3665 /* This is a register variable. */
3666 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3667 else if (GET_CODE (rtl) == MEM
3668 && GET_CODE (XEXP (rtl, 0)) == REG
3669 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3670 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3671 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3672 /* This is a variable-sized structure. */
3673 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3674 }
3675 \f
3676 int
3677 max_parm_reg_num ()
3678 {
3679 return max_parm_reg;
3680 }
3681
3682 /* Return the first insn following those generated by `assign_parms'. */
3683
3684 rtx
3685 get_first_nonparm_insn ()
3686 {
3687 if (last_parm_insn)
3688 return NEXT_INSN (last_parm_insn);
3689 return get_insns ();
3690 }
3691
3692 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3693 Crash if there is none. */
3694
3695 rtx
3696 get_first_block_beg ()
3697 {
3698 register rtx searcher;
3699 register rtx insn = get_first_nonparm_insn ();
3700
3701 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3702 if (GET_CODE (searcher) == NOTE
3703 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3704 return searcher;
3705
3706 abort (); /* Invalid call to this function. (See comments above.) */
3707 return NULL_RTX;
3708 }
3709
3710 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3711 This means a type for which function calls must pass an address to the
3712 function or get an address back from the function.
3713 EXP may be a type node or an expression (whose type is tested). */
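/* For instance (the exact answer is target-dependent): a plain `int'
   normally yields 0 here, while a TREE_ADDRESSABLE type, an aggregate
   compiled with -fpcc-struct-return, or any type for which
   RETURN_IN_MEMORY is nonzero yields 1. */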
3714
3715 int
3716 aggregate_value_p (exp)
3717 tree exp;
3718 {
3719 int i, regno, nregs;
3720 rtx reg;
3721 tree type;
3722 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3723 type = exp;
3724 else
3725 type = TREE_TYPE (exp);
3726
3727 if (RETURN_IN_MEMORY (type))
3728 return 1;
3729 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3730 and thus can't be returned in registers. */
3731 if (TREE_ADDRESSABLE (type))
3732 return 1;
3733 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3734 return 1;
3735 /* Make sure we have suitable call-clobbered regs to return
3736 the value in; if not, we must return it in memory. */
3737 reg = hard_function_value (type, 0);
3738
3739 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3740 it is OK. */
3741 if (GET_CODE (reg) != REG)
3742 return 0;
3743
3744 regno = REGNO (reg);
3745 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3746 for (i = 0; i < nregs; i++)
3747 if (! call_used_regs[regno + i])
3748 return 1;
3749 return 0;
3750 }
3751 \f
3752 /* Assign RTL expressions to the function's parameters.
3753 This may involve copying them into registers and using
3754 those registers as the RTL for them.
3755
3756 If SECOND_TIME is non-zero it means that this function is being
3757 called a second time. This is done by integrate.c when a function's
3758 compilation is deferred. We need to come back here in case the
3759 FUNCTION_ARG macro computes items needed for the rest of the compilation
3760 (such as changing which registers are fixed or caller-saved). But suppress
3761 writing any insns or setting DECL_RTL of anything in this case. */
3762
3763 void
3764 assign_parms (fndecl, second_time)
3765 tree fndecl;
3766 int second_time;
3767 {
3768 register tree parm;
3769 register rtx entry_parm = 0;
3770 register rtx stack_parm = 0;
3771 CUMULATIVE_ARGS args_so_far;
3772 enum machine_mode promoted_mode, passed_mode;
3773 enum machine_mode nominal_mode, promoted_nominal_mode;
3774 int unsignedp;
3775 /* Total space needed so far for args on the stack,
3776 given as a constant and a tree-expression. */
3777 struct args_size stack_args_size;
3778 tree fntype = TREE_TYPE (fndecl);
3779 tree fnargs = DECL_ARGUMENTS (fndecl);
3780 /* This is used for the arg pointer when referring to stack args. */
3781 rtx internal_arg_pointer;
3782 /* This is a dummy PARM_DECL that we use for the function result if
3783 the function returns a structure. */
3784 tree function_result_decl = 0;
3785 int varargs_setup = 0;
3786 rtx conversion_insns = 0;
3787
3788 /* Nonzero if the last arg is named `__builtin_va_alist',
3789 which is used on some machines for old-fashioned non-ANSI varargs.h;
3790 this should be stuck onto the stack as if it had arrived there. */
3791 int hide_last_arg
3792 = (current_function_varargs
3793 && fnargs
3794 && (parm = tree_last (fnargs)) != 0
3795 && DECL_NAME (parm)
3796 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3797 "__builtin_va_alist")));
3798
3799 /* Nonzero if the function takes extra anonymous args.
3800 This means the last named arg must be on the stack
3801 right before the anonymous ones. */
3802 int stdarg
3803 = (TYPE_ARG_TYPES (fntype) != 0
3804 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3805 != void_type_node));
3806
3807 current_function_stdarg = stdarg;
3808
3809 /* If the reg that the virtual arg pointer will be translated into is
3810 not a fixed reg or is the stack pointer, make a copy of the virtual
3811 arg pointer, and address parms via the copy. The frame pointer is
3812 considered fixed even though it is not marked as such.
3813
3814 The second time through, simply use ap to avoid generating rtx. */
3815
3816 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3817 || ! (fixed_regs[ARG_POINTER_REGNUM]
3818 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3819 && ! second_time)
3820 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3821 else
3822 internal_arg_pointer = virtual_incoming_args_rtx;
3823 current_function_internal_arg_pointer = internal_arg_pointer;
3824
3825 stack_args_size.constant = 0;
3826 stack_args_size.var = 0;
3827
3828 /* If struct value address is treated as the first argument, make it so. */
3829 if (aggregate_value_p (DECL_RESULT (fndecl))
3830 && ! current_function_returns_pcc_struct
3831 && struct_value_incoming_rtx == 0)
3832 {
3833 tree type = build_pointer_type (TREE_TYPE (fntype));
3834
3835 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3836
3837 DECL_ARG_TYPE (function_result_decl) = type;
3838 TREE_CHAIN (function_result_decl) = fnargs;
3839 fnargs = function_result_decl;
3840 }
3841
3842 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3843 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3844 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3845
3846 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3847 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3848 #else
3849 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3850 #endif
3851
3852 /* We haven't yet found an argument that we must push and pretend the
3853 caller did. */
3854 current_function_pretend_args_size = 0;
3855
3856 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3857 {
3858 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3859 struct args_size stack_offset;
3860 struct args_size arg_size;
3861 int passed_pointer = 0;
3862 int did_conversion = 0;
3863 tree passed_type = DECL_ARG_TYPE (parm);
3864 tree nominal_type = TREE_TYPE (parm);
3865
3866 /* Set LAST_NAMED if this is the last named arg before some
3867 anonymous args. */
3868 int last_named = ((TREE_CHAIN (parm) == 0
3869 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3870 && (stdarg || current_function_varargs));
3871 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3872 most machines, if this is a varargs/stdarg function, then we treat
3873 the last named arg as if it were anonymous too. */
3874 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3875
3876 if (TREE_TYPE (parm) == error_mark_node
3877 /* This can happen after weird syntax errors
3878 or if an enum type is defined among the parms. */
3879 || TREE_CODE (parm) != PARM_DECL
3880 || passed_type == NULL)
3881 {
3882 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3883 = gen_rtx_MEM (BLKmode, const0_rtx);
3884 TREE_USED (parm) = 1;
3885 continue;
3886 }
3887
3888 /* For a varargs.h function, save info about regs and stack space
3889 used by the individual args, not including the va_alist arg. */
3890 if (hide_last_arg && last_named)
3891 current_function_args_info = args_so_far;
3892
3893 /* Find mode of arg as it is passed, and mode of arg
3894 as it should be during execution of this function. */
3895 passed_mode = TYPE_MODE (passed_type);
3896 nominal_mode = TYPE_MODE (nominal_type);
3897
3898 /* If the parm's mode is VOID, its value doesn't matter,
3899 so avoid the usual things like emit_move_insn that could crash. */
3900 if (nominal_mode == VOIDmode)
3901 {
3902 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3903 continue;
3904 }
3905
3906 /* If the parm is to be passed as a transparent union, use the
3907 type of the first field for the tests below. We have already
3908 verified that the modes are the same. */
3909 if (DECL_TRANSPARENT_UNION (parm)
3910 || TYPE_TRANSPARENT_UNION (passed_type))
3911 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3912
3913 /* See if this arg was passed by invisible reference. It is if
3914 it is an object whose size depends on the contents of the
3915 object itself or if the machine requires these objects be passed
3916 that way. */
3917
3918 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3919 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3920 || TREE_ADDRESSABLE (passed_type)
3921 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3922 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3923 passed_type, named_arg)
3924 #endif
3925 )
3926 {
3927 passed_type = nominal_type = build_pointer_type (passed_type);
3928 passed_pointer = 1;
3929 passed_mode = nominal_mode = Pmode;
3930 }
3931
3932 promoted_mode = passed_mode;
3933
3934 #ifdef PROMOTE_FUNCTION_ARGS
3935 /* Compute the mode to which the arg is actually extended. */
3936 unsignedp = TREE_UNSIGNED (passed_type);
3937 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3938 #endif
3939
3940 /* Let machine desc say which reg (if any) the parm arrives in.
3941 0 means it arrives on the stack. */
3942 #ifdef FUNCTION_INCOMING_ARG
3943 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3944 passed_type, named_arg);
3945 #else
3946 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3947 passed_type, named_arg);
3948 #endif
3949
3950 if (entry_parm == 0)
3951 promoted_mode = passed_mode;
3952
3953 #ifdef SETUP_INCOMING_VARARGS
3954 /* If this is the last named parameter, do any required setup for
3955 varargs or stdargs. We need to know about the case of this being an
3956 addressable type, in which case we skip the registers it
3957 would have arrived in.
3958
3959 For stdargs, LAST_NAMED will be set for two parameters, the one that
3960 is actually the last named, and the dummy parameter. We only
3961 want to do this action once.
3962
3963 Also, indicate when RTL generation is to be suppressed. */
3964 if (last_named && !varargs_setup)
3965 {
3966 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3967 current_function_pretend_args_size,
3968 second_time);
3969 varargs_setup = 1;
3970 }
3971 #endif
3972
3973 /* Determine parm's home in the stack,
3974 in case it arrives in the stack or we should pretend it did.
3975
3976 Compute the stack position and rtx where the argument arrives
3977 and its size.
3978
3979 There is one complexity here: If this was a parameter that would
3980 have been passed in registers, but wasn't only because it is
3981 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3982 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3983 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3984 0 as it was the previous time. */
3985
3986 locate_and_pad_parm (promoted_mode, passed_type,
3987 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3988 1,
3989 #else
3990 #ifdef FUNCTION_INCOMING_ARG
3991 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3992 passed_type,
3993 (named_arg
3994 || varargs_setup)) != 0,
3995 #else
3996 FUNCTION_ARG (args_so_far, promoted_mode,
3997 passed_type,
3998 named_arg || varargs_setup) != 0,
3999 #endif
4000 #endif
4001 fndecl, &stack_args_size, &stack_offset, &arg_size);
4002
4003 if (! second_time)
4004 {
4005 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4006
4007 if (offset_rtx == const0_rtx)
4008 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4009 else
4010 stack_parm = gen_rtx_MEM (promoted_mode,
4011 gen_rtx_PLUS (Pmode,
4012 internal_arg_pointer,
4013 offset_rtx));
4014
4015 /* If this is a memory ref that contains aggregate components,
4016 mark it as such for cse and loop optimize. Likewise if it
4017 is readonly. */
4018 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4019 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4020 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4021 }
4022
4023 /* If this parameter was passed both in registers and in the stack,
4024 use the copy on the stack. */
4025 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4026 entry_parm = 0;
4027
4028 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4029 /* If this parm was passed part in regs and part in memory,
4030 pretend it arrived entirely in memory
4031 by pushing the register-part onto the stack.
4032
4033 In the special case of a DImode or DFmode that is split,
4034 we could put it together in a pseudoreg directly,
4035 but for now that's not worth bothering with. */
4036
4037 if (entry_parm)
4038 {
4039 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4040 passed_type, named_arg);
4041
4042 if (nregs > 0)
4043 {
4044 current_function_pretend_args_size
4045 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4046 / (PARM_BOUNDARY / BITS_PER_UNIT)
4047 * (PARM_BOUNDARY / BITS_PER_UNIT));
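/* Worked example with hypothetical values: nregs == 3,
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 give 3*4 == 12 bytes,
   which the expression above rounds up to 16. */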
4048
4049 if (! second_time)
4050 {
4051 /* Handle calls that pass values in multiple non-contiguous
4052 locations. The Irix 6 ABI has examples of this. */
4053 if (GET_CODE (entry_parm) == PARALLEL)
4054 emit_group_store (validize_mem (stack_parm), entry_parm,
4055 int_size_in_bytes (TREE_TYPE (parm)),
4056 (TYPE_ALIGN (TREE_TYPE (parm))
4057 / BITS_PER_UNIT));
4058 else
4059 move_block_from_reg (REGNO (entry_parm),
4060 validize_mem (stack_parm), nregs,
4061 int_size_in_bytes (TREE_TYPE (parm)));
4062 }
4063 entry_parm = stack_parm;
4064 }
4065 }
4066 #endif
4067
4068 /* If we didn't decide this parm came in a register,
4069 by default it came on the stack. */
4070 if (entry_parm == 0)
4071 entry_parm = stack_parm;
4072
4073 /* Record permanently how this parm was passed. */
4074 if (! second_time)
4075 DECL_INCOMING_RTL (parm) = entry_parm;
4076
4077 /* If there is actually space on the stack for this parm,
4078 count it in stack_args_size; otherwise set stack_parm to 0
4079 to indicate there is no preallocated stack slot for the parm. */
4080
4081 if (entry_parm == stack_parm
4082 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4083 /* On some machines, even if a parm value arrives in a register
4084 there is still an (uninitialized) stack slot allocated for it.
4085
4086 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4087 whether this parameter already has a stack slot allocated,
4088 because an arg block exists only if current_function_args_size
4089 is larger than some threshold, and we haven't calculated that
4090 yet. So, for now, we just assume that stack slots never exist
4091 in this case. */
4092 || REG_PARM_STACK_SPACE (fndecl) > 0
4093 #endif
4094 )
4095 {
4096 stack_args_size.constant += arg_size.constant;
4097 if (arg_size.var)
4098 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4099 }
4100 else
4101 /* No stack slot was pushed for this parm. */
4102 stack_parm = 0;
4103
4104 /* Update info on where next arg arrives in registers. */
4105
4106 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4107 passed_type, named_arg);
4108
4109 /* If this is our second time through, we are done with this parm. */
4110 if (second_time)
4111 continue;
4112
4113 /* If we can't trust the parm stack slot to be aligned enough
4114 for its ultimate type, don't use that slot after entry.
4115 We'll make another stack slot, if we need one. */
4116 {
4117 int thisparm_boundary
4118 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4119
4120 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4121 stack_parm = 0;
4122 }
4123
4124 /* If parm was passed in memory, and we need to convert it on entry,
4125 don't store it back in that same slot. */
4126 if (entry_parm != 0
4127 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4128 stack_parm = 0;
4129
4130 #if 0
4131 /* Now adjust STACK_PARM to the mode and precise location
4132 where this parameter should live during execution,
4133 if we discover that it must live in the stack during execution.
4134 To make debuggers happier on big-endian machines, we store
4135 the value in the last bytes of the space available. */
4136
4137 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4138 && stack_parm != 0)
4139 {
4140 rtx offset_rtx;
4141
4142 if (BYTES_BIG_ENDIAN
4143 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4144 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4145 - GET_MODE_SIZE (nominal_mode));
4146
4147 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4148 if (offset_rtx == const0_rtx)
4149 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4150 else
4151 stack_parm = gen_rtx_MEM (nominal_mode,
4152 gen_rtx_PLUS (Pmode,
4153 internal_arg_pointer,
4154 offset_rtx));
4155
4156 /* If this is a memory ref that contains aggregate components,
4157 mark it as such for cse and loop optimize. */
4158 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4159 }
4160 #endif /* 0 */
4161
4162 #ifdef STACK_REGS
4163 /* We need this "use" info, because the gcc-register->stack-register
4164 converter in reg-stack.c needs to know which registers are active
4165 at the start of the function call. The actual parameter loading
4166 instructions are not always available then anymore, since they might
4167 have been optimised away. */
4168
4169 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4170 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4171 #endif
4172
4173 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4174 in the mode in which it arrives.
4175 STACK_PARM is an RTX for a stack slot where the parameter can live
4176 during the function (in case we want to put it there).
4177 STACK_PARM is 0 if no stack slot was pushed for it.
4178
4179 Now output code if necessary to convert ENTRY_PARM to
4180 the type in which this function declares it,
4181 and store that result in an appropriate place,
4182 which may be a pseudo reg, may be STACK_PARM,
4183 or may be a local stack slot if STACK_PARM is 0.
4184
4185 Set DECL_RTL to that place. */
4186
4187 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4188 {
4189 /* If a BLKmode arrives in registers, copy it to a stack slot.
4190 Handle calls that pass values in multiple non-contiguous
4191 locations. The Irix 6 ABI has examples of this. */
4192 if (GET_CODE (entry_parm) == REG
4193 || GET_CODE (entry_parm) == PARALLEL)
4194 {
4195 int size_stored
4196 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4197 UNITS_PER_WORD);
4198
4199 /* Note that we will be storing an integral number of words,
4200 so we have to be careful to allocate an integral number of
4201 words. We do this below in the call to assign_stack_local
4202 if space was not allocated in the argument list. If it was,
4203 this will not work if PARM_BOUNDARY is not a multiple of
4204 BITS_PER_WORD. It isn't clear how to fix this if it becomes
4205 a problem. */
4206
4207 if (stack_parm == 0)
4208 {
4209 stack_parm
4210 = assign_stack_local (GET_MODE (entry_parm),
4211 size_stored, 0);
4212
4213 /* If this is a memory ref that contains aggregate
4214 components, mark it as such for cse and loop optimize. */
4215 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4216 }
4217
4218 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4219 abort ();
4220
4221 if (TREE_READONLY (parm))
4222 RTX_UNCHANGING_P (stack_parm) = 1;
4223
4224 /* Handle calls that pass values in multiple non-contiguous
4225 locations. The Irix 6 ABI has examples of this. */
4226 if (GET_CODE (entry_parm) == PARALLEL)
4227 emit_group_store (validize_mem (stack_parm), entry_parm,
4228 int_size_in_bytes (TREE_TYPE (parm)),
4229 (TYPE_ALIGN (TREE_TYPE (parm))
4230 / BITS_PER_UNIT));
4231 else
4232 move_block_from_reg (REGNO (entry_parm),
4233 validize_mem (stack_parm),
4234 size_stored / UNITS_PER_WORD,
4235 int_size_in_bytes (TREE_TYPE (parm)));
4236 }
4237 DECL_RTL (parm) = stack_parm;
4238 }
4239 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4240 && ! DECL_INLINE (fndecl))
4241 /* layout_decl may set this. */
4242 || TREE_ADDRESSABLE (parm)
4243 || TREE_SIDE_EFFECTS (parm)
4244 /* If -ffloat-store specified, don't put explicit
4245 float variables into registers. */
4246 || (flag_float_store
4247 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4248 /* Always assign pseudo to structure return or item passed
4249 by invisible reference. */
4250 || passed_pointer || parm == function_result_decl)
4251 {
4252 /* Store the parm in a pseudoregister during the function, but we
4253 may need to do it in a wider mode. */
4254
4255 register rtx parmreg;
4256 int regno, regnoi = 0, regnor = 0;
4257
4258 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4259
4260 promoted_nominal_mode
4261 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4262
4263 parmreg = gen_reg_rtx (promoted_nominal_mode);
4264 mark_user_reg (parmreg);
4265
4266 /* If this was an item that we received a pointer to, set DECL_RTL
4267 appropriately. */
4268 if (passed_pointer)
4269 {
4270 DECL_RTL (parm)
4271 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4272 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4273 }
4274 else
4275 DECL_RTL (parm) = parmreg;
4276
4277 /* Copy the value into the register. */
4278 if (nominal_mode != passed_mode
4279 || promoted_nominal_mode != promoted_mode)
4280 {
4281 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4282 mode, by the caller. We now have to convert it to
4283 NOMINAL_MODE, if different. However, PARMREG may be in
4284 a different mode than NOMINAL_MODE if it is being stored
4285 promoted.
4286
4287 If ENTRY_PARM is a hard register, it might be in a register
4288 not valid for operating in its mode (e.g., an odd-numbered
4289 register for a DFmode). In that case, moves are the only
4290 thing valid, so we can't do a convert from there. This
4291 occurs when the calling sequence allow such misaligned
4292 usages.
4293
4294 In addition, the conversion may involve a call, which could
4295 clobber parameters which haven't been copied to pseudo
4296 registers yet. Therefore, we must first copy the parm to
4297 a pseudo reg here, and save the conversion until after all
4298 parameters have been moved. */
4299
4300 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4301
4302 emit_move_insn (tempreg, validize_mem (entry_parm));
4303
4304 push_to_sequence (conversion_insns);
4305 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4306
4307 expand_assignment (parm,
4308 make_tree (nominal_type, tempreg), 0, 0);
4309 conversion_insns = get_insns ();
4310 did_conversion = 1;
4311 end_sequence ();
4312 }
4313 else
4314 emit_move_insn (parmreg, validize_mem (entry_parm));
4315
4316 /* If we were passed a pointer but the actual value
4317 can safely live in a register, put it in one. */
4318 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4319 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4320 && ! DECL_INLINE (fndecl))
4321 /* layout_decl may set this. */
4322 || TREE_ADDRESSABLE (parm)
4323 || TREE_SIDE_EFFECTS (parm)
4324 /* If -ffloat-store specified, don't put explicit
4325 float variables into registers. */
4326 || (flag_float_store
4327 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4328 {
4329 /* We can't use nominal_mode, because it will have been set to
4330 Pmode above. We must use the actual mode of the parm. */
4331 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4332 mark_user_reg (parmreg);
4333 emit_move_insn (parmreg, DECL_RTL (parm));
4334 DECL_RTL (parm) = parmreg;
4335 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4336 now the parm. */
4337 stack_parm = 0;
4338 }
4339 #ifdef FUNCTION_ARG_CALLEE_COPIES
4340 /* If we are passed an arg by reference and it is our responsibility
4341 to make a copy, do it now.
4342 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4343 original argument, so we must recreate them in the call to
4344 FUNCTION_ARG_CALLEE_COPIES. */
4345 /* ??? Later add code to skip the copy when the argument isn't
4346 modified. */
4347
4348 else if (passed_pointer
4349 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4350 TYPE_MODE (DECL_ARG_TYPE (parm)),
4351 DECL_ARG_TYPE (parm),
4352 named_arg)
4353 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4354 {
4355 rtx copy;
4356 tree type = DECL_ARG_TYPE (parm);
4357
4358 /* This sequence may involve a library call perhaps clobbering
4359 registers that haven't been copied to pseudos yet. */
4360
4361 push_to_sequence (conversion_insns);
4362
4363 if (TYPE_SIZE (type) == 0
4364 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4365 /* This is a variable sized object. */
4366 copy = gen_rtx_MEM (BLKmode,
4367 allocate_dynamic_stack_space
4368 (expr_size (parm), NULL_RTX,
4369 TYPE_ALIGN (type)));
4370 else
4371 copy = assign_stack_temp (TYPE_MODE (type),
4372 int_size_in_bytes (type), 1);
4373 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4374 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4375
4376 store_expr (parm, copy, 0);
4377 emit_move_insn (parmreg, XEXP (copy, 0));
4378 if (current_function_check_memory_usage)
4379 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4380 XEXP (copy, 0), ptr_mode,
4381 GEN_INT (int_size_in_bytes (type)),
4382 TYPE_MODE (sizetype),
4383 GEN_INT (MEMORY_USE_RW),
4384 TYPE_MODE (integer_type_node));
4385 conversion_insns = get_insns ();
4386 did_conversion = 1;
4387 end_sequence ();
4388 }
4389 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4390
4391 /* In any case, record the parm's desired stack location
4392 in case we later discover it must live in the stack.
4393
4394 If it is a COMPLEX value, store the stack location for both
4395 halves. */
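/* E.g. (modes are illustrative): a complex double parm may arrive as
   (concat:DC (reg:DF r) (reg:DF i)); the code below records a stack
   location for each of the two part registers separately. */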
4396
4397 if (GET_CODE (parmreg) == CONCAT)
4398 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4399 else
4400 regno = REGNO (parmreg);
4401
4402 if (regno >= max_parm_reg)
4403 {
4404 rtx *new;
4405 int old_max_parm_reg = max_parm_reg;
4406
4407 /* It's slow to expand this one register at a time,
4408 but it's also rare and we need max_parm_reg to be
4409 precisely correct. */
4410 max_parm_reg = regno + 1;
4411 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4412 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4413 old_max_parm_reg * sizeof (rtx));
4414 bzero ((char *) (new + old_max_parm_reg),
4415 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4416 parm_reg_stack_loc = new;
4417 }
4418
4419 if (GET_CODE (parmreg) == CONCAT)
4420 {
4421 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4422
4423 regnor = REGNO (gen_realpart (submode, parmreg));
4424 regnoi = REGNO (gen_imagpart (submode, parmreg));
4425
4426 if (stack_parm != 0)
4427 {
4428 parm_reg_stack_loc[regnor]
4429 = gen_realpart (submode, stack_parm);
4430 parm_reg_stack_loc[regnoi]
4431 = gen_imagpart (submode, stack_parm);
4432 }
4433 else
4434 {
4435 parm_reg_stack_loc[regnor] = 0;
4436 parm_reg_stack_loc[regnoi] = 0;
4437 }
4438 }
4439 else
4440 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4441
4442 /* Mark the register as eliminable if we did no conversion
4443 and it was copied from memory at a fixed offset,
4444 and the arg pointer was not copied to a pseudo-reg.
4445 If the arg pointer is a pseudo reg or the offset formed
4446 an invalid address, such memory-equivalences
4447 as we make here would screw up life analysis for it. */
4448 if (nominal_mode == passed_mode
4449 && ! did_conversion
4450 && stack_parm != 0
4451 && GET_CODE (stack_parm) == MEM
4452 && stack_offset.var == 0
4453 && reg_mentioned_p (virtual_incoming_args_rtx,
4454 XEXP (stack_parm, 0)))
4455 {
4456 rtx linsn = get_last_insn ();
4457 rtx sinsn, set;
4458
4459 /* Mark complex types separately. */
4460 if (GET_CODE (parmreg) == CONCAT)
4461 /* Scan backwards for the set of the real and
4462 imaginary parts. */
4463 for (sinsn = linsn; sinsn != 0;
4464 sinsn = prev_nonnote_insn (sinsn))
4465 {
4466 set = single_set (sinsn);
4467 if (set != 0
4468 && SET_DEST (set) == regno_reg_rtx [regnoi])
4469 REG_NOTES (sinsn)
4470 = gen_rtx_EXPR_LIST (REG_EQUIV,
4471 parm_reg_stack_loc[regnoi],
4472 REG_NOTES (sinsn));
4473 else if (set != 0
4474 && SET_DEST (set) == regno_reg_rtx [regnor])
4475 REG_NOTES (sinsn)
4476 = gen_rtx_EXPR_LIST (REG_EQUIV,
4477 parm_reg_stack_loc[regnor],
4478 REG_NOTES (sinsn));
4479 }
4480 else if ((set = single_set (linsn)) != 0
4481 && SET_DEST (set) == parmreg)
4482 REG_NOTES (linsn)
4483 = gen_rtx_EXPR_LIST (REG_EQUIV,
4484 stack_parm, REG_NOTES (linsn));
4485 }
4486
4487 /* For a pointer data type, suggest a pointer register. */
4488 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4489 mark_reg_pointer (parmreg,
4490 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4491 / BITS_PER_UNIT));
4492 }
4493 else
4494 {
4495 /* Value must be stored in the stack slot STACK_PARM
4496 during function execution. */
4497
4498 if (promoted_mode != nominal_mode)
4499 {
4500 /* Conversion is required. */
4501 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4502
4503 emit_move_insn (tempreg, validize_mem (entry_parm));
4504
4505 push_to_sequence (conversion_insns);
4506 entry_parm = convert_to_mode (nominal_mode, tempreg,
4507 TREE_UNSIGNED (TREE_TYPE (parm)));
4508 if (stack_parm)
4509 {
4510 /* ??? This may need a big-endian conversion on sparc64. */
4511 stack_parm = change_address (stack_parm, nominal_mode,
4512 NULL_RTX);
4513 }
4514 conversion_insns = get_insns ();
4515 did_conversion = 1;
4516 end_sequence ();
4517 }
4518
4519 if (entry_parm != stack_parm)
4520 {
4521 if (stack_parm == 0)
4522 {
4523 stack_parm
4524 = assign_stack_local (GET_MODE (entry_parm),
4525 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4526 /* If this is a memory ref that contains aggregate components,
4527 mark it as such for cse and loop optimize. */
4528 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4529 }
4530
4531 if (promoted_mode != nominal_mode)
4532 {
4533 push_to_sequence (conversion_insns);
4534 emit_move_insn (validize_mem (stack_parm),
4535 validize_mem (entry_parm));
4536 conversion_insns = get_insns ();
4537 end_sequence ();
4538 }
4539 else
4540 emit_move_insn (validize_mem (stack_parm),
4541 validize_mem (entry_parm));
4542 }
4543 if (current_function_check_memory_usage)
4544 {
4545 push_to_sequence (conversion_insns);
4546 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4547 XEXP (stack_parm, 0), ptr_mode,
4548 GEN_INT (GET_MODE_SIZE (GET_MODE
4549 (entry_parm))),
4550 TYPE_MODE (sizetype),
4551 GEN_INT (MEMORY_USE_RW),
4552 TYPE_MODE (integer_type_node));
4553
4554 conversion_insns = get_insns ();
4555 end_sequence ();
4556 }
4557 DECL_RTL (parm) = stack_parm;
4558 }
4559
4560 /* If this "parameter" is the place where we receive the
4561 function's incoming structure pointer, set up the result. */
4562 if (parm == function_result_decl)
4563 {
4564 tree result = DECL_RESULT (fndecl);
4565 tree restype = TREE_TYPE (result);
4566
4567 DECL_RTL (result)
4568 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4569
4570 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4571 }
4572
4573 if (TREE_THIS_VOLATILE (parm))
4574 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4575 if (TREE_READONLY (parm))
4576 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4577 }
4578
4579 /* Output all parameter conversion instructions (possibly including calls)
4580 now that all parameters have been copied out of hard registers. */
4581 emit_insns (conversion_insns);
4582
4583 last_parm_insn = get_last_insn ();
4584
4585 current_function_args_size = stack_args_size.constant;
4586
4587 /* Adjust function incoming argument size for alignment and
4588 minimum length. */
4589
4590 #ifdef REG_PARM_STACK_SPACE
4591 #ifndef MAYBE_REG_PARM_STACK_SPACE
4592 current_function_args_size = MAX (current_function_args_size,
4593 REG_PARM_STACK_SPACE (fndecl));
4594 #endif
4595 #endif
4596
4597 #ifdef STACK_BOUNDARY
4598 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4599
4600 current_function_args_size
4601 = ((current_function_args_size + STACK_BYTES - 1)
4602 / STACK_BYTES) * STACK_BYTES;
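/* E.g. with STACK_BOUNDARY == 64 (STACK_BYTES == 8), an args size of
   20 bytes rounds up to 24. */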
4603 #endif
4604
4605 #ifdef ARGS_GROW_DOWNWARD
4606 current_function_arg_offset_rtx
4607 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4608 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4609 size_int (-stack_args_size.constant)),
4610 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4611 #else
4612 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4613 #endif
4614
4615 /* See how many bytes, if any, of its args a function should try to pop
4616 on return. */
4617
4618 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4619 current_function_args_size);
4620
4621 /* For a stdarg.h function, save info about
4622 regs and stack space used by the named args. */
4623
4624 if (!hide_last_arg)
4625 current_function_args_info = args_so_far;
4626
4627 /* Set the rtx used for the function return value. Put this in its
4628 own variable so any optimizers that need this information don't have
4629 to include tree.h. Do this here so it gets done when an inlined
4630 function gets output. */
4631
4632 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4633 }
4634 \f
4635 /* Indicate whether REGNO is an incoming argument to the current function
4636 that was promoted to a wider mode. If so, return the RTX for the
4637 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4638 that REGNO is promoted from and whether the promotion was signed or
4639 unsigned. */
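/* An illustrative use (target-dependent): on a machine whose
   PROMOTE_FUNCTION_ARGS widens QImode to SImode, a `char' parm
   arriving in (reg:SI N) makes promoted_input_arg (N, &mode, &unsignedp)
   return that rtx with *PMODE set to QImode. */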
4640
4641 #ifdef PROMOTE_FUNCTION_ARGS
4642
4643 rtx
4644 promoted_input_arg (regno, pmode, punsignedp)
4645 int regno;
4646 enum machine_mode *pmode;
4647 int *punsignedp;
4648 {
4649 tree arg;
4650
4651 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4652 arg = TREE_CHAIN (arg))
4653 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4654 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4655 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4656 {
4657 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4658 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4659
4660 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4661 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4662 && mode != DECL_MODE (arg))
4663 {
4664 *pmode = DECL_MODE (arg);
4665 *punsignedp = unsignedp;
4666 return DECL_INCOMING_RTL (arg);
4667 }
4668 }
4669
4670 return 0;
4671 }
4672
4673 #endif
4674 \f
4675 /* Compute the size and offset from the start of the stacked arguments for a
4676 parm passed in mode PASSED_MODE and with type TYPE.
4677
4678 INITIAL_OFFSET_PTR points to the current offset into the stacked
4679 arguments.
4680
4681 The starting offset and size for this parm are returned in *OFFSET_PTR
4682 and *ARG_SIZE_PTR, respectively.
4683
4684 IN_REGS is non-zero if the argument will be passed in registers. It will
4685 never be set if REG_PARM_STACK_SPACE is not defined.
4686
4687 FNDECL is the function in which the argument was defined.
4688
4689 There are two types of rounding that are done. The first, controlled by
4690 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4691 list to be aligned to the specific boundary (in bits). This rounding
4692 affects the initial and starting offsets, but not the argument size.
4693
4694 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4695 optionally rounds the size of the parm to PARM_BOUNDARY. The
4696 initial offset is not affected by this rounding, while the size always
4697 is and the starting offset may be. */
4698
4699 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4700 initial_offset_ptr is positive because locate_and_pad_parm's
4701 callers pass in the total size of args so far as
4702 initial_offset_ptr. arg_size_ptr is always positive. */
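/* A worked example under hypothetical target values, with args growing
   upward and upward padding: FUNCTION_ARG_BOUNDARY == 64 bits rounds an
   initial offset of 4 up to a starting offset of 8; a 10-byte BLKmode
   argument rounded to a PARM_BOUNDARY of 32 bits then yields an arg
   size of 12. */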
4703
4704 void
4705 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4706 initial_offset_ptr, offset_ptr, arg_size_ptr)
4707 enum machine_mode passed_mode;
4708 tree type;
4709 int in_regs;
4710 tree fndecl;
4711 struct args_size *initial_offset_ptr;
4712 struct args_size *offset_ptr;
4713 struct args_size *arg_size_ptr;
4714 {
4715 tree sizetree
4716 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4717 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4718 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4719
4720 #ifdef REG_PARM_STACK_SPACE
4721 /* If we have found a stack parm before we reach the end of the
4722 area reserved for registers, skip that area. */
4723 if (! in_regs)
4724 {
4725 int reg_parm_stack_space = 0;
4726
4727 #ifdef MAYBE_REG_PARM_STACK_SPACE
4728 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4729 #else
4730 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4731 #endif
4732 if (reg_parm_stack_space > 0)
4733 {
4734 if (initial_offset_ptr->var)
4735 {
4736 initial_offset_ptr->var
4737 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4738 size_int (reg_parm_stack_space));
4739 initial_offset_ptr->constant = 0;
4740 }
4741 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4742 initial_offset_ptr->constant = reg_parm_stack_space;
4743 }
4744 }
4745 #endif /* REG_PARM_STACK_SPACE */
4746
4747 arg_size_ptr->var = 0;
4748 arg_size_ptr->constant = 0;
4749
4750 #ifdef ARGS_GROW_DOWNWARD
4751 if (initial_offset_ptr->var)
4752 {
4753 offset_ptr->constant = 0;
4754 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4755 initial_offset_ptr->var);
4756 }
4757 else
4758 {
4759 offset_ptr->constant = - initial_offset_ptr->constant;
4760 offset_ptr->var = 0;
4761 }
4762 if (where_pad != none
4763 && (TREE_CODE (sizetree) != INTEGER_CST
4764 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4765 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4766 SUB_PARM_SIZE (*offset_ptr, sizetree);
4767 if (where_pad != downward)
4768 pad_to_arg_alignment (offset_ptr, boundary);
4769 if (initial_offset_ptr->var)
4770 {
4771 arg_size_ptr->var = size_binop (MINUS_EXPR,
4772 size_binop (MINUS_EXPR,
4773 integer_zero_node,
4774 initial_offset_ptr->var),
4775 offset_ptr->var);
4776 }
4777 else
4778 {
4779 arg_size_ptr->constant = (- initial_offset_ptr->constant
4780 - offset_ptr->constant);
4781 }
4782 #else /* !ARGS_GROW_DOWNWARD */
4783 pad_to_arg_alignment (initial_offset_ptr, boundary);
4784 *offset_ptr = *initial_offset_ptr;
4785
4786 #ifdef PUSH_ROUNDING
4787 if (passed_mode != BLKmode)
4788 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4789 #endif
4790
4791 /* pad_below needs the pre-rounded size to know how much to pad below,
4792 so this must be done before rounding up. */
4793 if (where_pad == downward
4794 /* However, BLKmode args passed in regs have their padding done elsewhere.
4795 The stack slot must be able to hold the entire register. */
4796 && !(in_regs && passed_mode == BLKmode))
4797 pad_below (offset_ptr, passed_mode, sizetree);
4798
4799 if (where_pad != none
4800 && (TREE_CODE (sizetree) != INTEGER_CST
4801 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4802 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4803
4804 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4805 #endif /* ARGS_GROW_DOWNWARD */
4806 }
4807
4808 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4809 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
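/* E.g. BOUNDARY == 64 (8 bytes): a constant offset of 13 becomes 16
   when arguments grow upward (CEIL_ROUND) and 8 when they grow
   downward (FLOOR_ROUND). */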
4810
4811 static void
4812 pad_to_arg_alignment (offset_ptr, boundary)
4813 struct args_size *offset_ptr;
4814 int boundary;
4815 {
4816 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4817
4818 if (boundary > BITS_PER_UNIT)
4819 {
4820 if (offset_ptr->var)
4821 {
4822 offset_ptr->var =
4823 #ifdef ARGS_GROW_DOWNWARD
4824 round_down
4825 #else
4826 round_up
4827 #endif
4828 (ARGS_SIZE_TREE (*offset_ptr),
4829 boundary / BITS_PER_UNIT);
4830 offset_ptr->constant = 0; /*?*/
4831 }
4832 else
4833 offset_ptr->constant =
4834 #ifdef ARGS_GROW_DOWNWARD
4835 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4836 #else
4837 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4838 #endif
4839 }
4840 }
4841
4842 #ifndef ARGS_GROW_DOWNWARD
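/* Adjust *OFFSET_PTR to skip the padding placed below an argument of
   mode PASSED_MODE (or of size SIZETREE, for BLKmode), so that the
   value itself ends on a PARM_BOUNDARY multiple.  E.g. (illustrative
   values) with PARM_BOUNDARY == 32, an HImode 2-byte parm advances the
   offset past 2 bytes of padding. */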
4843 static void
4844 pad_below (offset_ptr, passed_mode, sizetree)
4845 struct args_size *offset_ptr;
4846 enum machine_mode passed_mode;
4847 tree sizetree;
4848 {
4849 if (passed_mode != BLKmode)
4850 {
4851 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4852 offset_ptr->constant
4853 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4854 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4855 - GET_MODE_SIZE (passed_mode));
4856 }
4857 else
4858 {
4859 if (TREE_CODE (sizetree) != INTEGER_CST
4860 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4861 {
4862 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4863 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4864 /* Add it in. */
4865 ADD_PARM_SIZE (*offset_ptr, s2);
4866 SUB_PARM_SIZE (*offset_ptr, sizetree);
4867 }
4868 }
4869 }
4870 #endif
4871
4872 #ifdef ARGS_GROW_DOWNWARD
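/* Return VALUE (a size tree) rounded down to a multiple of DIVISOR;
   e.g. a value of 13 with a divisor of 8 yields 8. */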
4873 static tree
4874 round_down (value, divisor)
4875 tree value;
4876 int divisor;
4877 {
4878 return size_binop (MULT_EXPR,
4879 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4880 size_int (divisor));
4881 }
4882 #endif
4883 \f
4884 /* Walk the tree of blocks describing the binding levels within a function
4885 and warn about uninitialized variables.
4886 This is done after calling flow_analysis and before global_alloc
4887 replaces the pseudo-regs with hard regs. */
4888
4889 void
4890 uninitialized_vars_warning (block)
4891 tree block;
4892 {
4893 register tree decl, sub;
4894 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4895 {
4896 if (TREE_CODE (decl) == VAR_DECL
4897 /* These warnings are unreliable for aggregates
4898 because assigning the fields one by one can fail to convince
4899 flow.c that the entire aggregate was initialized.
4900 Unions are troublesome because members may be shorter. */
4901 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4902 && DECL_RTL (decl) != 0
4903 && GET_CODE (DECL_RTL (decl)) == REG
4904 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4905 warning_with_decl (decl,
4906 "`%s' might be used uninitialized in this function");
4907 if (TREE_CODE (decl) == VAR_DECL
4908 && DECL_RTL (decl) != 0
4909 && GET_CODE (DECL_RTL (decl)) == REG
4910 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4911 warning_with_decl (decl,
4912 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4913 }
4914 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4915 uninitialized_vars_warning (sub);
4916 }
4917
4918 /* Do the appropriate part of uninitialized_vars_warning
4919 but for arguments instead of local variables. */
4920
4921 void
4922 setjmp_args_warning ()
4923 {
4924 register tree decl;
4925 for (decl = DECL_ARGUMENTS (current_function_decl);
4926 decl; decl = TREE_CHAIN (decl))
4927 if (DECL_RTL (decl) != 0
4928 && GET_CODE (DECL_RTL (decl)) == REG
4929 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4930 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4931 }
4932
4933 /* If this function calls setjmp, put all vars into the stack
4934 unless they were declared `register'. */
4935
4936 void
4937 setjmp_protect (block)
4938 tree block;
4939 {
4940 register tree decl, sub;
4941 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4942 if ((TREE_CODE (decl) == VAR_DECL
4943 || TREE_CODE (decl) == PARM_DECL)
4944 && DECL_RTL (decl) != 0
4945 && (GET_CODE (DECL_RTL (decl)) == REG
4946 || (GET_CODE (DECL_RTL (decl)) == MEM
4947 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4948 /* If this variable came from an inline function, it must be
4949 that its life doesn't overlap the setjmp. If there was a
4950 setjmp in the function, it would already be in memory. We
4951 must exclude such variables because their DECL_RTL might be
4952 set to strange things such as virtual_stack_vars_rtx. */
4953 && ! DECL_FROM_INLINE (decl)
4954 && (
4955 #ifdef NON_SAVING_SETJMP
4956 /* If longjmp doesn't restore the registers,
4957 don't put anything in them. */
4958 NON_SAVING_SETJMP
4959 ||
4960 #endif
4961 ! DECL_REGISTER (decl)))
4962 put_var_into_stack (decl);
4963 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4964 setjmp_protect (sub);
4965 }
4966 \f
4967 /* Like the previous function, but for args instead of local variables. */
4968
4969 void
4970 setjmp_protect_args ()
4971 {
4972 register tree decl;
4973 for (decl = DECL_ARGUMENTS (current_function_decl);
4974 decl; decl = TREE_CHAIN (decl))
4975 if ((TREE_CODE (decl) == VAR_DECL
4976 || TREE_CODE (decl) == PARM_DECL)
4977 && DECL_RTL (decl) != 0
4978 && (GET_CODE (DECL_RTL (decl)) == REG
4979 || (GET_CODE (DECL_RTL (decl)) == MEM
4980 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4981 && (
4982 /* If longjmp doesn't restore the registers,
4983 don't put anything in them. */
4984 #ifdef NON_SAVING_SETJMP
4985 NON_SAVING_SETJMP
4986 ||
4987 #endif
4988 ! DECL_REGISTER (decl)))
4989 put_var_into_stack (decl);
4990 }
4991 \f
4992 /* Return the context-pointer register corresponding to DECL,
4993 or 0 if it does not need one. */
4994
4995 rtx
4996 lookup_static_chain (decl)
4997 tree decl;
4998 {
4999 tree context = decl_function_context (decl);
5000 tree link;
5001
5002 if (context == 0
5003 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5004 return 0;
5005
5006 /* We treat inline_function_decl as an alias for the current function
5007 because that is the inline function whose vars, types, etc.
5008 are being merged into the current function.
5009 See expand_inline_function. */
5010 if (context == current_function_decl || context == inline_function_decl)
5011 return virtual_stack_vars_rtx;
5012
5013 for (link = context_display; link; link = TREE_CHAIN (link))
5014 if (TREE_PURPOSE (link) == context)
5015 return RTL_EXPR_RTL (TREE_VALUE (link));
5016
5017 abort ();
5018 }
5019 \f
5020 /* Convert a stack slot address ADDR for variable VAR
5021 (from a containing function)
5022 into an address valid in this function (using a static chain). */
5023
5024 rtx
5025 fix_lexical_addr (addr, var)
5026 rtx addr;
5027 tree var;
5028 {
5029 rtx basereg;
5030 HOST_WIDE_INT displacement;
5031 tree context = decl_function_context (var);
5032 struct function *fp;
5033 rtx base = 0;
5034
5035 /* If this is the present function, we need not do anything. */
5036 if (context == current_function_decl || context == inline_function_decl)
5037 return addr;
5038
5039 for (fp = outer_function_chain; fp; fp = fp->next)
5040 if (fp->decl == context)
5041 break;
5042
5043 if (fp == 0)
5044 abort ();
5045
5046 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5047 addr = XEXP (XEXP (addr, 0), 0);
5048
5049 /* Decode given address as base reg plus displacement. */
5050 if (GET_CODE (addr) == REG)
5051 basereg = addr, displacement = 0;
5052 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5053 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5054 else
5055 abort ();
5056
5057 /* We accept vars reached via the containing function's
5058 incoming arg pointer and via its stack variables pointer. */
5059 if (basereg == fp->internal_arg_pointer)
5060 {
5061 /* If reached via arg pointer, get the arg pointer value
5062 out of that function's stack frame.
5063
5064 There are two cases: If a separate ap is needed, allocate a
5065 slot in the outer function for it and dereference it that way.
5066 This is correct even if the real ap is actually a pseudo.
5067 Otherwise, just adjust the offset from the frame pointer to
5068 compensate. */
5069
5070 #ifdef NEED_SEPARATE_AP
5071 rtx addr;
5072
5073 if (fp->arg_pointer_save_area == 0)
5074 fp->arg_pointer_save_area
5075 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5076
5077 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5078 addr = memory_address (Pmode, addr);
5079
5080 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5081 #else
5082 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5083 base = lookup_static_chain (var);
5084 #endif
5085 }
5086
5087 else if (basereg == virtual_stack_vars_rtx)
5088 {
5089 /* This is the same code as lookup_static_chain, duplicated here to
5090 avoid an extra call to decl_function_context. */
5091 tree link;
5092
5093 for (link = context_display; link; link = TREE_CHAIN (link))
5094 if (TREE_PURPOSE (link) == context)
5095 {
5096 base = RTL_EXPR_RTL (TREE_VALUE (link));
5097 break;
5098 }
5099 }
5100
5101 if (base == 0)
5102 abort ();
5103
5104 /* Use same offset, relative to appropriate static chain or argument
5105 pointer. */
5106 return plus_constant (base, displacement);
5107 }
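/* A worked example (hypothetical rtl): if VAR sits in its containing
   function at (plus (reg virtual-stack-vars) (const_int -12)), the code
   above decodes basereg and displacement = -12, looks up that function's
   frame base through the context display, and returns
   (plus base (const_int -12)); the offset survives, only the base
   register changes.  */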
5108 \f
5109 /* Return the address of the trampoline for entering nested fn FUNCTION.
5110 If necessary, allocate a trampoline (in the stack frame)
5111 and emit rtl to initialize its contents (at entry to this function). */
5112
5113 rtx
5114 trampoline_address (function)
5115 tree function;
5116 {
5117 tree link;
5118 tree rtlexp;
5119 rtx tramp;
5120 struct function *fp;
5121 tree fn_context;
5122
5123 /* Find an existing trampoline and return it. */
5124 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5125 if (TREE_PURPOSE (link) == function)
5126 return
5127 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5128
5129 for (fp = outer_function_chain; fp; fp = fp->next)
5130 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5131 if (TREE_PURPOSE (link) == function)
5132 {
5133 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5134 function);
5135 return round_trampoline_addr (tramp);
5136 }
5137
5138 /* None exists; we must make one. */
5139
5140 /* Find the `struct function' for the function containing FUNCTION. */
5141 fp = 0;
5142 fn_context = decl_function_context (function);
5143 if (fn_context != current_function_decl
5144 && fn_context != inline_function_decl)
5145 for (fp = outer_function_chain; fp; fp = fp->next)
5146 if (fp->decl == fn_context)
5147 break;
5148
5149 /* Allocate run-time space for this trampoline
5150 (usually in the defining function's stack frame). */
5151 #ifdef ALLOCATE_TRAMPOLINE
5152 tramp = ALLOCATE_TRAMPOLINE (fp);
5153 #else
5154 /* If rounding is needed, allocate extra space
5155 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5156 #ifdef TRAMPOLINE_ALIGNMENT
5157 #define TRAMPOLINE_REAL_SIZE \
5158 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5159 #else
5160 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5161 #endif
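   /* Illustrative arithmetic (hypothetical numbers): with
      TRAMPOLINE_SIZE 24 and 8-byte alignment the slot is 24 + 8 - 1 = 31
      bytes, so however the slot happens to be aligned, the rounded-up
      address still has 24 usable bytes before the end of the slot.  */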
5162 if (fp != 0)
5163 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5164 else
5165 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5166 #endif
5167
5168 /* Record the trampoline for reuse and note it for later initialization
5169 by expand_function_end. */
5170 if (fp != 0)
5171 {
5172 push_obstacks (fp->function_maybepermanent_obstack,
5173 fp->function_maybepermanent_obstack);
5174 rtlexp = make_node (RTL_EXPR);
5175 RTL_EXPR_RTL (rtlexp) = tramp;
5176 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5177 pop_obstacks ();
5178 }
5179 else
5180 {
5181 /* Make the RTL_EXPR node temporary, not momentary, so that the
5182 trampoline_list doesn't become garbage. */
5183 int momentary = suspend_momentary ();
5184 rtlexp = make_node (RTL_EXPR);
5185 resume_momentary (momentary);
5186
5187 RTL_EXPR_RTL (rtlexp) = tramp;
5188 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5189 }
5190
5191 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5192 return round_trampoline_addr (tramp);
5193 }
5194
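/* Background sketch (hypothetical GNU C source, not part of this file):
   what forces a trampoline is taking the address of a nested function,

     void outer (int x)
     {
       int inner (int y) { return x + y; }
       int (*fp) (int) = inner;      .. fp gets trampoline_address ..
       call_it (fp);
     }

   since the trampoline must load INNER's static chain before jumping
   to its code.  */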
5195 /* Given a trampoline address,
5196 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5197
5198 static rtx
5199 round_trampoline_addr (tramp)
5200 rtx tramp;
5201 {
5202 #ifdef TRAMPOLINE_ALIGNMENT
5203 /* Round address up to desired boundary. */
5204 rtx temp = gen_reg_rtx (Pmode);
5205 temp = expand_binop (Pmode, add_optab, tramp,
5206 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5207 temp, 0, OPTAB_LIB_WIDEN);
5208 tramp = expand_binop (Pmode, and_optab, temp,
5209 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5210 temp, 0, OPTAB_LIB_WIDEN);
5211 #endif
5212 return tramp;
5213 }
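/* For instance (illustrative values): with TRAMPOLINE_ALIGNMENT of 32
   bits the two expand_binop calls compute (tramp + 3) & -4, so an
   address of 0x1002 rounds up to 0x1004.  */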
5214 \f
5215 /* The functions identify_blocks and reorder_blocks provide a way to
5216 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5217 duplicate portions of the RTL code. Call identify_blocks before
5218 changing the RTL, and call reorder_blocks after. */
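/* A minimal usage sketch (hypothetical caller, not code in this file):
   an optimizer that reshuffles insns brackets its work like this,

     tree *vec = identify_blocks (block, insns);
     .. shuffle or duplicate the insn chain, block notes included ..
     block = reorder_blocks (vec, block, insns);
     free (vec);

   the vector is xmalloc'd by identify_blocks, so freeing it is the
   caller's job.  */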
5219
5220 /* Put all this function's BLOCK nodes including those that are chained
5221 onto the first block into a vector, and return it.
5222 Also store in each NOTE for the beginning or end of a block
5223 the index of that block in the vector.
5224 The arguments are BLOCK, the chain of top-level blocks of the function,
5225 and INSNS, the insn chain of the function. */
5226
5227 tree *
5228 identify_blocks (block, insns)
5229 tree block;
5230 rtx insns;
5231 {
5232 int n_blocks;
5233 tree *block_vector;
5234 int *block_stack;
5235 int depth = 0;
5236 int next_block_number = 1;
5237 int current_block_number = 1;
5238 rtx insn;
5239
5240 if (block == 0)
5241 return 0;
5242
5243 n_blocks = all_blocks (block, 0);
5244 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5245 block_stack = (int *) alloca (n_blocks * sizeof (int));
5246
5247 all_blocks (block, block_vector);
5248
5249 for (insn = insns; insn; insn = NEXT_INSN (insn))
5250 if (GET_CODE (insn) == NOTE)
5251 {
5252 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5253 {
5254 block_stack[depth++] = current_block_number;
5255 current_block_number = next_block_number;
5256 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5257 }
5258 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5259 {
5260 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5261 current_block_number = block_stack[--depth];
5262 }
5263 }
5264
5265 if (n_blocks != next_block_number)
5266 abort ();
5267
5268 return block_vector;
5269 }
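/* Example of the numbering (a sketch): for a body whose top-level block
   contains  A { B } { C },  the BLOCK_BEG notes are numbered 1, 2, 3 in
   textual order, and each BLOCK_END note receives its opener's number
   through the stack above.  Slot 0 of the returned vector holds the
   top-level block itself, which has no notes of its own.  */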
5270
5271 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5272 and a revised instruction chain, rebuild the tree structure
5273 of BLOCK nodes to correspond to the new order of RTL.
5274 The new block tree is inserted below BLOCK.
5275 Returns the current top-level block. */
5276
5277 tree
5278 reorder_blocks (block_vector, block, insns)
5279 tree *block_vector;
5280 tree block;
5281 rtx insns;
5282 {
5283 tree current_block = block;
5284 rtx insn;
5285
5286 if (block_vector == 0)
5287 return block;
5288
5289 /* Prune the old trees away, so that they don't get in the way. */
5290 BLOCK_SUBBLOCKS (current_block) = 0;
5291 BLOCK_CHAIN (current_block) = 0;
5292
5293 for (insn = insns; insn; insn = NEXT_INSN (insn))
5294 if (GET_CODE (insn) == NOTE)
5295 {
5296 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5297 {
5298 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5299 /* If we have seen this block before, copy it. */
5300 if (TREE_ASM_WRITTEN (block))
5301 block = copy_node (block);
5302 BLOCK_SUBBLOCKS (block) = 0;
5303 TREE_ASM_WRITTEN (block) = 1;
5304 BLOCK_SUPERCONTEXT (block) = current_block;
5305 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5306 BLOCK_SUBBLOCKS (current_block) = block;
5307 current_block = block;
5308 NOTE_SOURCE_FILE (insn) = 0;
5309 }
5310 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5311 {
5312 BLOCK_SUBBLOCKS (current_block)
5313 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5314 current_block = BLOCK_SUPERCONTEXT (current_block);
5315 NOTE_SOURCE_FILE (insn) = 0;
5316 }
5317 }
5318
5319 BLOCK_SUBBLOCKS (current_block)
5320 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5321 return current_block;
5322 }
5323
5324 /* Reverse the order of elements in the chain T of blocks,
5325 and return the new head of the chain (old last element). */
5326
5327 static tree
5328 blocks_nreverse (t)
5329 tree t;
5330 {
5331 register tree prev = 0, decl, next;
5332 for (decl = t; decl; decl = next)
5333 {
5334 next = BLOCK_CHAIN (decl);
5335 BLOCK_CHAIN (decl) = prev;
5336 prev = decl;
5337 }
5338 return prev;
5339 }
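/* E.g. a chain A -> B -> C comes back as C -> B -> A; only the
   BLOCK_CHAIN links are rewritten, no nodes are copied.  */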
5340
5341 /* Count the subblocks of the list starting with BLOCK, and list them
5342 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5343 blocks. */
5344
5345 static int
5346 all_blocks (block, vector)
5347 tree block;
5348 tree *vector;
5349 {
5350 int n_blocks = 0;
5351
5352 while (block)
5353 {
5354 TREE_ASM_WRITTEN (block) = 0;
5355
5356 /* Record this block. */
5357 if (vector)
5358 vector[n_blocks] = block;
5359
5360 ++n_blocks;
5361
5362 /* Record the subblocks, and their subblocks... */
5363 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5364 vector ? vector + n_blocks : 0);
5365 block = BLOCK_CHAIN (block);
5366 }
5367
5368 return n_blocks;
5369 }
5370 \f
5371 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5372 and initialize static variables for generating RTL for the statements
5373 of the function. */
5374
5375 void
5376 init_function_start (subr, filename, line)
5377 tree subr;
5378 char *filename;
5379 int line;
5380 {
5381 init_stmt_for_function ();
5382
5383 cse_not_expected = ! optimize;
5384
5385 /* Caller save not needed yet. */
5386 caller_save_needed = 0;
5387
5388 /* No stack slots have been made yet. */
5389 stack_slot_list = 0;
5390
5391 /* There is no stack slot for handling nonlocal gotos. */
5392 nonlocal_goto_handler_slot = 0;
5393 nonlocal_goto_stack_level = 0;
5394
5395 /* No labels have been declared for nonlocal use. */
5396 nonlocal_labels = 0;
5397
5398 /* No function calls so far in this function. */
5399 function_call_count = 0;
5400
5401 /* No parm regs have been allocated.
5402 (This is important for output_inline_function.) */
5403 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5404
5405 /* Initialize the RTL mechanism. */
5406 init_emit ();
5407
5408 /* Initialize the queue of pending postincrement and postdecrements,
5409 and some other info in expr.c. */
5410 init_expr ();
5411
5412 /* We haven't done register allocation yet. */
5413 reg_renumber = 0;
5414
5415 init_const_rtx_hash_table ();
5416
5417 current_function_name = (*decl_printable_name) (subr, 2);
5418
5419 /* Nonzero if this is a nested function that uses a static chain. */
5420
5421 current_function_needs_context
5422 = (decl_function_context (current_function_decl) != 0
5423 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5424
5425 /* Set if a call to setjmp is seen. */
5426 current_function_calls_setjmp = 0;
5427
5428 /* Set if a call to longjmp is seen. */
5429 current_function_calls_longjmp = 0;
5430
5431 current_function_calls_alloca = 0;
5432 current_function_has_nonlocal_label = 0;
5433 current_function_has_nonlocal_goto = 0;
5434 current_function_contains_functions = 0;
5435 current_function_sp_is_unchanging = 0;
5436 current_function_is_thunk = 0;
5437
5438 current_function_returns_pcc_struct = 0;
5439 current_function_returns_struct = 0;
5440 current_function_epilogue_delay_list = 0;
5441 current_function_uses_const_pool = 0;
5442 current_function_uses_pic_offset_table = 0;
5443 current_function_cannot_inline = 0;
5444
5445 /* We have not yet needed to make a label to jump to for tail-recursion. */
5446 tail_recursion_label = 0;
5447
5448 /* We haven't had a need to make a save area for ap yet. */
5449
5450 arg_pointer_save_area = 0;
5451
5452 /* No stack slots allocated yet. */
5453 frame_offset = 0;
5454
5455 /* No SAVE_EXPRs in this function yet. */
5456 save_expr_regs = 0;
5457
5458 /* No RTL_EXPRs in this function yet. */
5459 rtl_expr_chain = 0;
5460
5461 /* Set up to allocate temporaries. */
5462 init_temp_slots ();
5463
5464 /* Within function body, compute a type's size as soon as it is laid out. */
5465 immediate_size_expand++;
5466
5467 /* We haven't made any trampolines for this function yet. */
5468 trampoline_list = 0;
5469
5470 init_pending_stack_adjust ();
5471 inhibit_defer_pop = 0;
5472
5473 current_function_outgoing_args_size = 0;
5474
5475 /* Prevent ever trying to delete the first instruction of a function.
5476 Also tell final how to output a linenum before the function prologue.
5477 Note linenums could be missing, e.g. when compiling a Java .class file. */
5478 if (line > 0)
5479 emit_line_note (filename, line);
5480
5481 /* Make sure first insn is a note even if we don't want linenums.
5482 This makes sure the first insn will never be deleted.
5483 Also, final expects a note to appear there. */
5484 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5485
5486 /* Set flags used by final.c. */
5487 if (aggregate_value_p (DECL_RESULT (subr)))
5488 {
5489 #ifdef PCC_STATIC_STRUCT_RETURN
5490 current_function_returns_pcc_struct = 1;
5491 #endif
5492 current_function_returns_struct = 1;
5493 }
5494
5495 /* Warn if the returned value is an aggregate type,
5496 regardless of which calling convention we are using for it. */
5497 if (warn_aggregate_return
5498 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5499 warning ("function returns an aggregate");
5500
5501 current_function_returns_pointer
5502 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5503
5504 /* Indicate that we need to distinguish between the return value of the
5505 present function and the return value of a function being called. */
5506 rtx_equal_function_value_matters = 1;
5507
5508 /* Indicate that we have not instantiated virtual registers yet. */
5509 virtuals_instantiated = 0;
5510
5511 /* Indicate we have no need of a frame pointer yet. */
5512 frame_pointer_needed = 0;
5513
5514 /* By default assume not varargs or stdarg. */
5515 current_function_varargs = 0;
5516 current_function_stdarg = 0;
5517 }
5518
5519 /* Indicate that the current function uses extra args
5520 not explicitly mentioned in the argument list in any fashion. */
5521
5522 void
5523 mark_varargs ()
5524 {
5525 current_function_varargs = 1;
5526 }
5527
5528 /* Expand a call to __main at the beginning of a possible main function. */
5529
5530 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5531 #undef HAS_INIT_SECTION
5532 #define HAS_INIT_SECTION
5533 #endif
5534
5535 void
5536 expand_main_function ()
5537 {
5538 #if !defined (HAS_INIT_SECTION)
5539 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5540 VOIDmode, 0);
5541 #endif /* not HAS_INIT_SECTION */
5542 }
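/* The effect, sketched: on a target without an init section, a compiled
   main starts with a call to __main (the libgcc routine that runs the
   global constructors); with HAS_INIT_SECTION the call is omitted
   because the init section does that work instead.  */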
5543 \f
5544 extern struct obstack permanent_obstack;
5545
5546 /* Start the RTL for a new function, and set variables used for
5547 emitting RTL.
5548 SUBR is the FUNCTION_DECL node.
5549 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5550 the function's parameters, which must be run at any return statement. */
5551
5552 void
5553 expand_function_start (subr, parms_have_cleanups)
5554 tree subr;
5555 int parms_have_cleanups;
5556 {
5557 register int i;
5558 tree tem;
5559 rtx last_ptr = NULL_RTX;
5560
5561 /* Make sure volatile mem refs aren't considered
5562 valid operands of arithmetic insns. */
5563 init_recog_no_volatile ();
5564
5565 /* Set this before generating any memory accesses. */
5566 current_function_check_memory_usage
5567 = (flag_check_memory_usage
5568 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5569
5570 current_function_instrument_entry_exit
5571 = (flag_instrument_function_entry_exit
5572 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5573
5574 /* If function gets a static chain arg, store it in the stack frame.
5575 Do this first, so it gets the first stack slot offset. */
5576 if (current_function_needs_context)
5577 {
5578 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5579
5580 /* Delay copying static chain if it is not a register to avoid
5581 conflicts with regs used for parameters. */
5582 if (! SMALL_REGISTER_CLASSES
5583 || GET_CODE (static_chain_incoming_rtx) == REG)
5584 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5585 }
5586
5587 /* If the parameters of this function need cleaning up, get a label
5588 for the beginning of the code which executes those cleanups. This must
5589 be done before doing anything with return_label. */
5590 if (parms_have_cleanups)
5591 cleanup_label = gen_label_rtx ();
5592 else
5593 cleanup_label = 0;
5594
5595 /* Make the label for return statements to jump to, if this machine
5596 does not have a one-instruction return and uses an epilogue,
5597 or if it returns a structure, or if it has parm cleanups. */
5598 #ifdef HAVE_return
5599 if (cleanup_label == 0 && HAVE_return
5600 && ! current_function_instrument_entry_exit
5601 && ! current_function_returns_pcc_struct
5602 && ! (current_function_returns_struct && ! optimize))
5603 return_label = 0;
5604 else
5605 return_label = gen_label_rtx ();
5606 #else
5607 return_label = gen_label_rtx ();
5608 #endif
5609
5610 /* Initialize rtx used to return the value. */
5611 /* Do this before assign_parms so that we copy the struct value address
5612 before any library calls that assign parms might generate. */
5613
5614 /* Decide whether to return the value in memory or in a register. */
5615 if (aggregate_value_p (DECL_RESULT (subr)))
5616 {
5617 /* Returning something that won't go in a register. */
5618 register rtx value_address = 0;
5619
5620 #ifdef PCC_STATIC_STRUCT_RETURN
5621 if (current_function_returns_pcc_struct)
5622 {
5623 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5624 value_address = assemble_static_space (size);
5625 }
5626 else
5627 #endif
5628 {
5629 /* Expect to be passed the address of a place to store the value.
5630 If it is passed as an argument, assign_parms will take care of
5631 it. */
5632 if (struct_value_incoming_rtx)
5633 {
5634 value_address = gen_reg_rtx (Pmode);
5635 emit_move_insn (value_address, struct_value_incoming_rtx);
5636 }
5637 }
5638 if (value_address)
5639 {
5640 DECL_RTL (DECL_RESULT (subr))
5641 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5642 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5643 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5644 }
5645 }
5646 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5647 /* If return mode is void, this decl rtl should not be used. */
5648 DECL_RTL (DECL_RESULT (subr)) = 0;
5649 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5650 {
5651 /* If function will end with cleanup code for parms,
5652 compute the return value into a pseudo reg,
5653 which we will copy into the true return register
5654 after the cleanups are done. */
5655
5656 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5657
5658 #ifdef PROMOTE_FUNCTION_RETURN
5659 tree type = TREE_TYPE (DECL_RESULT (subr));
5660 int unsignedp = TREE_UNSIGNED (type);
5661
5662 mode = promote_mode (type, mode, &unsignedp, 1);
5663 #endif
5664
5665 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5666 }
5667 else
5668 /* Scalar, returned in a register. */
5669 {
5670 #ifdef FUNCTION_OUTGOING_VALUE
5671 DECL_RTL (DECL_RESULT (subr))
5672 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5673 #else
5674 DECL_RTL (DECL_RESULT (subr))
5675 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5676 #endif
5677
5678 /* Mark this reg as the function's return value. */
5679 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5680 {
5681 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5682 /* Needed because we may need to move this to memory
5683 in case it's a named return value whose address is taken. */
5684 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5685 }
5686 }
5687
5688 /* Initialize rtx for parameters and local variables.
5689 In some cases this requires emitting insns. */
5690
5691 assign_parms (subr, 0);
5692
5693 /* Copy the static chain now if it wasn't a register. The delay is to
5694 avoid conflicts with the parameter passing registers. */
5695
5696 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5697 if (GET_CODE (static_chain_incoming_rtx) != REG)
5698 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5699
5700 /* The following was moved from init_function_start.
5701 The move is supposed to make sdb output more accurate. */
5702 /* Indicate the beginning of the function body,
5703 as opposed to parm setup. */
5704 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5705
5706 /* If doing stupid allocation, mark parms as born here. */
5707
5708 if (GET_CODE (get_last_insn ()) != NOTE)
5709 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5710 parm_birth_insn = get_last_insn ();
5711
5712 if (obey_regdecls)
5713 {
5714 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5715 use_variable (regno_reg_rtx[i]);
5716
5717 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5718 use_variable (current_function_internal_arg_pointer);
5719 }
5720
5721 context_display = 0;
5722 if (current_function_needs_context)
5723 {
5724 /* Fetch static chain values for containing functions. */
5725 tem = decl_function_context (current_function_decl);
5726 /* If not doing stupid register allocation, copy the static chain
5727 pointer into a pseudo. If we have small register classes, copy
5728 the value from memory if static_chain_incoming_rtx is a REG. If
5729 we do stupid register allocation, we use the stack address
5730 generated above. */
5731 if (tem && ! obey_regdecls)
5732 {
5733 /* If the static chain originally came in a register, put it back
5734 there, then move it out in the next insn. The reason for
5735 this peculiar code is to satisfy function integration. */
5736 if (SMALL_REGISTER_CLASSES
5737 && GET_CODE (static_chain_incoming_rtx) == REG)
5738 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5739 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5740 }
5741
5742 while (tem)
5743 {
5744 tree rtlexp = make_node (RTL_EXPR);
5745
5746 RTL_EXPR_RTL (rtlexp) = last_ptr;
5747 context_display = tree_cons (tem, rtlexp, context_display);
5748 tem = decl_function_context (tem);
5749 if (tem == 0)
5750 break;
5751 /* Chain through stack frames, assuming the pointer to the next lexical
5752 frame is found at the place we always store it. */
5753 #ifdef FRAME_GROWS_DOWNWARD
5754 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5755 #endif
5756 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5757 memory_address (Pmode, last_ptr)));
5758
5759 /* If we are not optimizing, ensure that we know that this
5760 piece of context is live over the entire function. */
5761 if (! optimize)
5762 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5763 save_expr_regs);
5764 }
5765 }
5766
5767 if (current_function_instrument_entry_exit)
5768 {
5769 rtx fun = DECL_RTL (current_function_decl);
5770 if (GET_CODE (fun) == MEM)
5771 fun = XEXP (fun, 0);
5772 else
5773 abort ();
5774 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5775 fun, Pmode,
5776 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5777 0,
5778 hard_frame_pointer_rtx),
5779 Pmode);
5780 }
5781
5782 /* The point after the display initializations is where the tail-recursion
5783 label should go, if we end up needing one. Ensure we have a NOTE here,
5784 since some things (like trampolines) get placed before this point. */
5785 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5786
5787 /* Evaluate now the sizes of any types declared among the arguments. */
5788 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5789 {
5790 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5791 EXPAND_MEMORY_USE_BAD);
5792 /* Flush the queue in case this parameter declaration has
5793 side-effects. */
5794 emit_queue ();
5795 }
5796
5797 /* Make sure there is a line number after the function entry setup code. */
5798 force_next_line_note ();
5799 }
5800 \f
5801 /* Generate RTL for the end of the current function.
5802 FILENAME and LINE are the current position in the source file.
5803
5804 It is up to language-specific callers to do cleanups for parameters--
5805 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5806
5807 void
5808 expand_function_end (filename, line, end_bindings)
5809 char *filename;
5810 int line;
5811 int end_bindings;
5812 {
5813 register int i;
5814 tree link;
5815
5816 #ifdef TRAMPOLINE_TEMPLATE
5817 static rtx initial_trampoline;
5818 #endif
5819
5820 #ifdef NON_SAVING_SETJMP
5821 /* Don't put any variables in registers if we call setjmp
5822 on a machine that fails to restore the registers. */
5823 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5824 {
5825 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5826 setjmp_protect (DECL_INITIAL (current_function_decl));
5827
5828 setjmp_protect_args ();
5829 }
5830 #endif
5831
5832 /* Save the argument pointer if a save area was made for it. */
5833 if (arg_pointer_save_area)
5834 {
5835 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5836 emit_insn_before (x, tail_recursion_reentry);
5837 }
5838
5839 /* Initialize any trampolines required by this function. */
5840 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5841 {
5842 tree function = TREE_PURPOSE (link);
5843 rtx context = lookup_static_chain (function);
5844 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5845 #ifdef TRAMPOLINE_TEMPLATE
5846 rtx blktramp;
5847 #endif
5848 rtx seq;
5849
5850 #ifdef TRAMPOLINE_TEMPLATE
5851 /* First make sure this compilation has a template for
5852 initializing trampolines. */
5853 if (initial_trampoline == 0)
5854 {
5855 end_temporary_allocation ();
5856 initial_trampoline
5857 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5858 resume_temporary_allocation ();
5859 }
5860 #endif
5861
5862 /* Generate insns to initialize the trampoline. */
5863 start_sequence ();
5864 tramp = round_trampoline_addr (XEXP (tramp, 0));
5865 #ifdef TRAMPOLINE_TEMPLATE
5866 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5867 emit_block_move (blktramp, initial_trampoline,
5868 GEN_INT (TRAMPOLINE_SIZE),
5869 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5870 #endif
5871 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5872 seq = get_insns ();
5873 end_sequence ();
5874
5875 /* Put those insns at entry to the containing function (this one). */
5876 emit_insns_before (seq, tail_recursion_reentry);
5877 }
5878
5879 /* If we are doing stack checking and this function makes calls,
5880 do a stack probe at the start of the function to ensure we have enough
5881 space for another stack frame. */
5882 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5883 {
5884 rtx insn, seq;
5885
5886 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5887 if (GET_CODE (insn) == CALL_INSN)
5888 {
5889 start_sequence ();
5890 probe_stack_range (STACK_CHECK_PROTECT,
5891 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5892 seq = get_insns ();
5893 end_sequence ();
5894 emit_insns_before (seq, tail_recursion_reentry);
5895 break;
5896 }
5897 }
5898
5899 /* Warn about unused parms if extra warnings were specified. */
5900 if (warn_unused && extra_warnings)
5901 {
5902 tree decl;
5903
5904 for (decl = DECL_ARGUMENTS (current_function_decl);
5905 decl; decl = TREE_CHAIN (decl))
5906 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5907 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5908 warning_with_decl (decl, "unused parameter `%s'");
5909 }
5910
5911 /* Delete handlers for nonlocal gotos if nothing uses them. */
5912 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5913 delete_handlers ();
5914
5915 /* End any sequences that failed to be closed due to syntax errors. */
5916 while (in_sequence_p ())
5917 end_sequence ();
5918
5919 /* Outside function body, can't compute type's actual size
5920 until next function's body starts. */
5921 immediate_size_expand--;
5922
5923 /* If doing stupid register allocation,
5924 mark register parms as dying here. */
5925
5926 if (obey_regdecls)
5927 {
5928 rtx tem;
5929 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5930 use_variable (regno_reg_rtx[i]);
5931
5932 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5933
5934 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5935 {
5936 use_variable (XEXP (tem, 0));
5937 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5938 }
5939
5940 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5941 use_variable (current_function_internal_arg_pointer);
5942 }
5943
5944 clear_pending_stack_adjust ();
5945 do_pending_stack_adjust ();
5946
5947 /* Mark the end of the function body.
5948 If control reaches this insn, the function can drop through
5949 without returning a value. */
5950 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5951
5952 /* Must mark the last line number note in the function, so that the test
5953 coverage code can avoid counting the last line twice. This just tells
5954 the code to ignore the immediately following line note, since there
5955 already exists a copy of this note somewhere above. This line number
5956 note is still needed for debugging though, so we can't delete it. */
5957 if (flag_test_coverage)
5958 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5959
5960 /* Output a linenumber for the end of the function.
5961 SDB depends on this. */
5962 emit_line_note_force (filename, line);
5963
5964 /* Output the label for the actual return from the function,
5965 if one is expected. This happens either because a function epilogue
5966 is used instead of a return instruction, or because a return was done
5967 with a goto in order to run local cleanups, or because of pcc-style
5968 structure returning. */
5969
5970 if (return_label)
5971 emit_label (return_label);
5972
5973 /* C++ uses this. */
5974 if (end_bindings)
5975 expand_end_bindings (0, 0, 0);
5976
5977 /* Now handle any leftover exception regions that may have been
5978 created for the parameters. */
5979 {
5980 rtx last = get_last_insn ();
5981 rtx label;
5982
5983 expand_leftover_cleanups ();
5984
5985 /* If the above emitted any code, make sure we jump around it. */
5986 if (last != get_last_insn ())
5987 {
5988 label = gen_label_rtx ();
5989 last = emit_jump_insn_after (gen_jump (label), last);
5990 last = emit_barrier_after (last);
5991 emit_label (label);
5992 }
5993 }
5994
5995 if (current_function_instrument_entry_exit)
5996 {
5997 rtx fun = DECL_RTL (current_function_decl);
5998 if (GET_CODE (fun) == MEM)
5999 fun = XEXP (fun, 0);
6000 else
6001 abort ();
6002 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6003 fun, Pmode,
6004 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6005 0,
6006 hard_frame_pointer_rtx),
6007 Pmode);
6008 }
6009
6010 /* If we had calls to alloca, and this machine needs
6011 an accurate stack pointer to exit the function,
6012 insert some code to save and restore the stack pointer. */
6013 #ifdef EXIT_IGNORE_STACK
6014 if (! EXIT_IGNORE_STACK)
6015 #endif
6016 if (current_function_calls_alloca)
6017 {
6018 rtx tem = 0;
6019
6020 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6021 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6022 }
6023
6024 /* If scalar return value was computed in a pseudo-reg,
6025 copy that to the hard return register. */
6026 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6027 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6028 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6029 >= FIRST_PSEUDO_REGISTER))
6030 {
6031 rtx real_decl_result;
6032
6033 #ifdef FUNCTION_OUTGOING_VALUE
6034 real_decl_result
6035 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6036 current_function_decl);
6037 #else
6038 real_decl_result
6039 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6040 current_function_decl);
6041 #endif
6042 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6043 /* If this is a BLKmode structure being returned in registers, then use
6044 the mode computed in expand_return. */
6045 if (GET_MODE (real_decl_result) == BLKmode)
6046 PUT_MODE (real_decl_result,
6047 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6048 emit_move_insn (real_decl_result,
6049 DECL_RTL (DECL_RESULT (current_function_decl)));
6050 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6051
6052 /* The delay slot scheduler assumes that current_function_return_rtx
6053 holds the hard register containing the return value, not a temporary
6054 pseudo. */
6055 current_function_return_rtx = real_decl_result;
6056 }
6057
6058 /* If returning a structure, arrange to return the address of the value
6059 in a place where debuggers expect to find it.
6060
6061 If returning a structure PCC style,
6062 the caller also depends on this value.
6063 And current_function_returns_pcc_struct is not necessarily set. */
6064 if (current_function_returns_struct
6065 || current_function_returns_pcc_struct)
6066 {
6067 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6068 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6069 #ifdef FUNCTION_OUTGOING_VALUE
6070 rtx outgoing
6071 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6072 current_function_decl);
6073 #else
6074 rtx outgoing
6075 = FUNCTION_VALUE (build_pointer_type (type),
6076 current_function_decl);
6077 #endif
6078
6079 /* Mark this as a function return value so integrate will delete the
6080 assignment and USE below when inlining this function. */
6081 REG_FUNCTION_VALUE_P (outgoing) = 1;
6082
6083 emit_move_insn (outgoing, value_address);
6084 use_variable (outgoing);
6085 }
6086
6087 /* If this is an implementation of __throw, do what's necessary to
6088 communicate between __builtin_eh_return and the epilogue. */
6089 expand_eh_return ();
6090
6091 /* Output a return insn if we are using one.
6092 Otherwise, let the rtl chain end here, to drop through
6093 into the epilogue. */
6094
6095 #ifdef HAVE_return
6096 if (HAVE_return)
6097 {
6098 emit_jump_insn (gen_return ());
6099 emit_barrier ();
6100 }
6101 #endif
6102
6103 /* Fix up any gotos that jumped out to the outermost
6104 binding level of the function.
6105 Must follow emitting RETURN_LABEL. */
6106
6107 /* If you have any cleanups to do at this point,
6108 and they need to create temporary variables,
6109 then you will lose. */
6110 expand_fixups (get_insns ());
6111 }
6112 \f
6113 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6114
6115 static int *prologue;
6116 static int *epilogue;
6117
6118 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6119 or a single insn). */
6120
6121 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6122 static int *
6123 record_insns (insns)
6124 rtx insns;
6125 {
6126 int *vec;
6127
6128 if (GET_CODE (insns) == SEQUENCE)
6129 {
6130 int len = XVECLEN (insns, 0);
6131 vec = (int *) oballoc ((len + 1) * sizeof (int));
6132 vec[len] = 0;
6133 while (--len >= 0)
6134 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6135 }
6136 else
6137 {
6138 vec = (int *) oballoc (2 * sizeof (int));
6139 vec[0] = INSN_UID (insns);
6140 vec[1] = 0;
6141 }
6142 return vec;
6143 }
6144
6145 /* Count how many INSN_UIDs in VEC are part of INSN (which may be a SEQUENCE). */
6146
6147 static int
6148 contains (insn, vec)
6149 rtx insn;
6150 int *vec;
6151 {
6152 register int i, j;
6153
6154 if (GET_CODE (insn) == INSN
6155 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6156 {
6157 int count = 0;
6158 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6159 for (j = 0; vec[j]; j++)
6160 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6161 count++;
6162 return count;
6163 }
6164 else
6165 {
6166 for (j = 0; vec[j]; j++)
6167 if (INSN_UID (insn) == vec[j])
6168 return 1;
6169 }
6170 return 0;
6171 }
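/* E.g. if INSN is a delay-slot SEQUENCE holding three insns, two of
   whose UIDs were recorded in VEC, the result is 2; for an ordinary
   insn the result is 1 or 0.  */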
6172 #endif /* HAVE_prologue || HAVE_epilogue */
6173
6174 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6175 this into place with notes indicating where the prologue ends and where
6176 the epilogue begins. Update the basic block information when possible. */
6177
6178 void
6179 thread_prologue_and_epilogue_insns (f)
6180 rtx f;
6181 {
6182 #ifdef HAVE_prologue
6183 if (HAVE_prologue)
6184 {
6185 rtx head, seq;
6186
6187 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6188 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6189 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6190 seq = gen_prologue ();
6191 head = emit_insn_after (seq, f);
6192
6193 /* Include the new prologue insns in the first block. Ignore them
6194 if they form a basic block unto themselves. */
6195 if (basic_block_head && n_basic_blocks
6196 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6197 basic_block_head[0] = NEXT_INSN (f);
6198
6199 /* Retain a map of the prologue insns. */
6200 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6201 }
6202 else
6203 #endif
6204 prologue = 0;
6205
6206 #ifdef HAVE_epilogue
6207 if (HAVE_epilogue)
6208 {
6209 rtx insn = get_last_insn ();
6210 rtx prev = prev_nonnote_insn (insn);
6211
6212 /* If we end with a BARRIER, we don't need an epilogue. */
6213 if (! (prev && GET_CODE (prev) == BARRIER))
6214 {
6215 rtx tail, seq, tem;
6216 rtx first_use = 0;
6217 rtx last_use = 0;
6218
6219 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6220 epilogue insns, the USE insns at the end of a function,
6221 the jump insn that returns, and then a BARRIER. */
6222
6223 /* Move the USE insns at the end of a function onto a list. */
6224 while (prev
6225 && GET_CODE (prev) == INSN
6226 && GET_CODE (PATTERN (prev)) == USE)
6227 {
6228 tem = prev;
6229 prev = prev_nonnote_insn (prev);
6230
6231 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6232 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6233 if (first_use)
6234 {
6235 NEXT_INSN (tem) = first_use;
6236 PREV_INSN (first_use) = tem;
6237 }
6238 first_use = tem;
6239 if (!last_use)
6240 last_use = tem;
6241 }
6242
6243 emit_barrier_after (insn);
6244
6245 seq = gen_epilogue ();
6246 tail = emit_jump_insn_after (seq, insn);
6247
6248 /* Insert the USE insns immediately before the return insn, which
6249 must be the first instruction before the final barrier. */
6250 if (first_use)
6251 {
6252 tem = prev_nonnote_insn (get_last_insn ());
6253 NEXT_INSN (PREV_INSN (tem)) = first_use;
6254 PREV_INSN (first_use) = PREV_INSN (tem);
6255 PREV_INSN (tem) = last_use;
6256 NEXT_INSN (last_use) = tem;
6257 }
6258
6259 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6260
6261 /* Include the new epilogue insns in the last block. Ignore
6262 them if they form a basic block unto themselves. */
6263 if (basic_block_end && n_basic_blocks
6264 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6265 basic_block_end[n_basic_blocks - 1] = tail;
6266
6267 /* Retain a map of the epilogue insns. */
6268 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6269 return;
6270 }
6271 }
6272 #endif
6273 epilogue = 0;
6274 }
6275
6276 /* Reposition the prologue-end and epilogue-begin notes after instruction
6277 scheduling and delayed branch scheduling. */
6278
6279 void
6280 reposition_prologue_and_epilogue_notes (f)
6281 rtx f;
6282 {
6283 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6284 /* Reposition the prologue and epilogue notes. */
6285 if (n_basic_blocks)
6286 {
6287 rtx next, prev;
6288 int len;
6289
6290 if (prologue)
6291 {
6292 register rtx insn, note = 0;
6293
6294 /* Scan from the beginning until we reach the last prologue insn.
6295 We apparently can't depend on basic_block_{head,end} after
6296 reorg has run. */
6297 for (len = 0; prologue[len]; len++)
6298 ;
6299 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6300 {
6301 if (GET_CODE (insn) == NOTE)
6302 {
6303 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6304 note = insn;
6305 }
6306 else if ((len -= contains (insn, prologue)) == 0)
6307 {
6308 /* Find the prologue-end note if we haven't already, and
6309 move it to just after the last prologue insn. */
6310 if (note == 0)
6311 {
6312 for (note = insn; (note = NEXT_INSN (note));)
6313 if (GET_CODE (note) == NOTE
6314 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6315 break;
6316 }
6317
6318 next = NEXT_INSN (note);
6319 prev = PREV_INSN (note);
6320 if (prev)
6321 NEXT_INSN (prev) = next;
6322 if (next)
6323 PREV_INSN (next) = prev;
6324
6325 /* Whether or not we can depend on basic_block_head,
6326 attempt to keep it up-to-date. */
6327 if (basic_block_head[0] == note)
6328 basic_block_head[0] = next;
6329
6330 add_insn_after (note, insn);
6331 }
6332 }
6333 }
6334
6335 if (epilogue)
6336 {
6337 register rtx insn, note = 0;
6338
6339 /* Scan from the end until we reach the first epilogue insn.
6340 We apparently can't depend on basic_block_{head,end} after
6341 reorg has run. */
6342 for (len = 0; epilogue[len]; len++)
6343 ;
6344 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6345 {
6346 if (GET_CODE (insn) == NOTE)
6347 {
6348 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6349 note = insn;
6350 }
6351 else if ((len -= contains (insn, epilogue)) == 0)
6352 {
6353 /* Find the epilogue-begin note if we haven't already, and
6354 move it to just before the first epilogue insn. */
6355 if (note == 0)
6356 {
6357 for (note = insn; (note = PREV_INSN (note));)
6358 if (GET_CODE (note) == NOTE
6359 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6360 break;
6361 }
6362 next = NEXT_INSN (note);
6363 prev = PREV_INSN (note);
6364 if (prev)
6365 NEXT_INSN (prev) = next;
6366 if (next)
6367 PREV_INSN (next) = prev;
6368
6369 /* Whether or not we can depend on basic_block_head,
6370 attempt to keep it up-to-date. */
6371 if (n_basic_blocks
6372 && basic_block_head[n_basic_blocks-1] == insn)
6373 basic_block_head[n_basic_blocks-1] = note;
6374
6375 add_insn_before (note, insn);
6376 }
6377 }
6378 }
6379 }
6380 #endif /* HAVE_prologue or HAVE_epilogue */
6381 }