/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

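/* For illustration, a rough sketch of how a language front end typically
   drives the entry points above; the exact call sites and arguments vary
   by front end, so treat this only as an orientation aid:

     expand_function_start (fndecl, 0);
     ... expand the statements of the body, calling assign_stack_local
         and put_var_into_stack as locals turn out to need stack homes ...
     expand_function_end (input_filename, lineno, 0);  */
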
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

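/* Worked example, for illustration: with ALIGN == 8,
   CEIL_ROUND (13, 8) == 16 and FLOOR_ROUND (13, 8) == 8, while for a
   negative value CEIL_ROUND (-13, 8) == -8 and FLOOR_ROUND (-13, 8) == -16.
   Division would round toward zero here and give the wrong offsets when
   the frame grows downward, which is why the masking form is used.  */
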
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled has the address of its
   labels taken.  */

int current_function_addresses_labels;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos.  There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slots;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
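
/* For illustration, a rough sketch of how the level fields interact while
   a statement containing a TARGET_EXPR is expanded; a hypothetical caller
   at level N would see roughly:

     push_temp_slots ();                          temp_slot_level is now N+1
     tmp = assign_stack_temp (mode, size, 2);     with KEEP == 2 the slot is
                                                  put at target_temp_slot_level
     free_temp_slots ();                          frees only level-(N+1) slots,
                                                  so TMP survives
     pop_temp_slots ();                           back to level N  */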
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->addresses_labels = current_function_addresses_labels;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_addresses_labels = p->addresses_labels;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
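
/* For illustration, a rough sketch of the intended pairing: a front end
   that encounters a nested function definition midway through its parent
   brackets the nested compilation like this (language hooks save and
   restore their own state around these calls as well):

     push_function_context ();
     ... compile the nested FUNCTION_DECL down to rtl ...
     pop_function_context ();  */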
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
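
/* Usage sketch, for illustration: a caller that needs a mode-aligned slot
   for a DImode local would write something like

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   while a caller wanting a maximally aligned BLKmode buffer of SIZE bytes
   would pass ALIGN == -1 instead.  */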

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
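
/* Usage sketch, for illustration: expansion code needing a scratch word
   for the duration of one statement would do roughly

     rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
     ... emit insns that use TMP ...
     free_temp_slots ();

   after which the slot becomes eligible for reuse by later statements.  */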
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
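
/* Usage sketch, for illustration: callers that hold a tree type rather
   than a bare machine mode prefer this entry point, e.g.

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 0);

   passing MEMORY_REQUIRED == 1 so that even a register-sized value gets
   an addressable stack slot instead of a pseudo.  */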
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
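
/* Worked example, for illustration: if two free BLKmode slots describe
   adjacent frame ranges, say P with base_offset 16 and full_size 16
   (covering offsets 16..32) and Q with base_offset 32 and full_size 8
   (covering 32..40), the walk above merges Q into P, leaving one free
   slot covering offsets 16..40 that a later assign_stack_temp can carve
   up again.  */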
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
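
/* Usage sketch, for illustration: a caller that must protect
   target_temp_slot_level across a region saves and restores it through
   the accessors above, roughly:

     int saved = get_target_temp_slot_level ();
     push_temp_slots_for_target ();
     ... expand a TARGET_EXPR ...
     pop_temp_slots ();
     set_target_temp_slot_level (saved);  */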
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
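
/* For illustration, a rough sketch of a call site: a C front end that
   reaches `int x; ... &x;' after X was already given a pseudo simply calls

     put_var_into_stack (x_decl);

   where x_decl stands for the VAR_DECL of X (a hypothetical name used
   only in this sketch).  With optimization on, the ADDRESSOF path above
   defers the actual stack allocation in the hope that purge_addressof
   can later discard it entirely.  */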

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */
1646
1647 static struct fixup_replacement *
1648 find_fixup_replacement (replacements, x)
1649 struct fixup_replacement **replacements;
1650 rtx x;
1651 {
1652 struct fixup_replacement *p;
1653
1654 /* See if we have already replaced this. */
1655 for (p = *replacements; p && p->old != x; p = p->next)
1656 ;
1657
1658 if (p == 0)
1659 {
1660 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1661 p->old = x;
1662 p->new = 0;
1663 p->next = *replacements;
1664 *replacements = p;
1665 }
1666
1667 return p;
1668 }
1669
1670 /* Scan the insn-chain starting with INSN for refs to VAR
1671 and fix them up. TOPLEVEL is nonzero if this chain is the
1672 main chain of insns for the current function. */
1673
1674 static void
1675 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1676 rtx var;
1677 enum machine_mode promoted_mode;
1678 int unsignedp;
1679 rtx insn;
1680 int toplevel;
1681 {
1682 rtx call_dest = 0;
1683
1684 while (insn)
1685 {
1686 rtx next = NEXT_INSN (insn);
1687 rtx set, prev, prev_set;
1688 rtx note;
1689
1690 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1691 {
1692 /* If this is a CLOBBER of VAR, delete it.
1693
1694 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1695 and REG_RETVAL notes too. */
1696 if (GET_CODE (PATTERN (insn)) == CLOBBER
1697 && (XEXP (PATTERN (insn), 0) == var
1698 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1699 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1700 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1701 {
1702 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1703 /* The REG_LIBCALL note will go away since we are going to
1704 turn INSN into a NOTE, so just delete the
1705 corresponding REG_RETVAL note. */
1706 remove_note (XEXP (note, 0),
1707 find_reg_note (XEXP (note, 0), REG_RETVAL,
1708 NULL_RTX));
1709
1710 /* In unoptimized compilation, we shouldn't call delete_insn
1711 except in jump.c when doing warnings. */
1712 PUT_CODE (insn, NOTE);
1713 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1714 NOTE_SOURCE_FILE (insn) = 0;
1715 }
1716
1717 /* The insn to load VAR from a home in the arglist
1718 is now a no-op. When we see it, just delete it.
1719 Similarly if this is storing VAR from a register from which
1720 it was loaded in the previous insn. This will occur
1721 when an ADDRESSOF was made for an arglist slot. */
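/* For instance (a sketch; modes omitted), the second insn of a pair

       (set (reg R) VAR)
       (set VAR (reg R))

   is deleted here, since the register was just loaded from VAR's slot
   and storing it back is a no-op.  */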
1722 else if (toplevel
1723 && (set = single_set (insn)) != 0
1724 && SET_DEST (set) == var
1725 /* If this represents the result of an insn group,
1726 don't delete the insn. */
1727 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1728 && (rtx_equal_p (SET_SRC (set), var)
1729 || (GET_CODE (SET_SRC (set)) == REG
1730 && (prev = prev_nonnote_insn (insn)) != 0
1731 && (prev_set = single_set (prev)) != 0
1732 && SET_DEST (prev_set) == SET_SRC (set)
1733 && rtx_equal_p (SET_SRC (prev_set), var))))
1734 {
1735 /* In unoptimized compilation, we shouldn't call delete_insn
1736 except in jump.c when doing warnings. */
1737 PUT_CODE (insn, NOTE);
1738 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1739 NOTE_SOURCE_FILE (insn) = 0;
1740 if (insn == last_parm_insn)
1741 last_parm_insn = PREV_INSN (next);
1742 }
1743 else
1744 {
1745 struct fixup_replacement *replacements = 0;
1746 rtx next_insn = NEXT_INSN (insn);
1747
1748 if (SMALL_REGISTER_CLASSES)
1749 {
1750 /* If the insn that copies the results of a CALL_INSN
1751 into a pseudo now references VAR, we have to use an
1752 intermediate pseudo since we want the life of the
1753 return value register to be only a single insn.
1754
1755 If we don't use an intermediate pseudo, such things as
1756 address computations to make the address of VAR valid
1757 if it is not can be placed between the CALL_INSN and INSN.
1758
1759 To make sure this doesn't happen, we record the destination
1760 of the CALL_INSN and see if the next insn uses both that
1761 and VAR. */
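/* Sketch of the rewrite done below (register numbers are made up):

       (call_insn (set (reg 28) (call ...)))
       (insn (set VAR (reg 28)))

   becomes

       (call_insn (set (reg 28) (call ...)))
       (insn (set (reg 95) (reg 28)))       ; fresh intermediate pseudo
       (insn (set VAR (reg 95)))

   so the return-value register stays live for only a single insn.  */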
1762
1763 if (call_dest != 0 && GET_CODE (insn) == INSN
1764 && reg_mentioned_p (var, PATTERN (insn))
1765 && reg_mentioned_p (call_dest, PATTERN (insn)))
1766 {
1767 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1768
1769 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1770
1771 PATTERN (insn) = replace_rtx (PATTERN (insn),
1772 call_dest, temp);
1773 }
1774
1775 if (GET_CODE (insn) == CALL_INSN
1776 && GET_CODE (PATTERN (insn)) == SET)
1777 call_dest = SET_DEST (PATTERN (insn));
1778 else if (GET_CODE (insn) == CALL_INSN
1779 && GET_CODE (PATTERN (insn)) == PARALLEL
1780 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1781 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1782 else
1783 call_dest = 0;
1784 }
1785
1786 /* See if we have to do anything to INSN now that VAR is in
1787 memory. If it needs to be loaded into a pseudo, use a single
1788 pseudo for the entire insn in case there is a MATCH_DUP
1789 between two operands. We pass a pointer to the head of
1790 a list of struct fixup_replacements. If fixup_var_refs_1
1791 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1792 it will record them in this list.
1793
1794 If it allocated a pseudo for any replacement, we copy into
1795 it here. */
1796
1797 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1798 &replacements);
1799
1800 /* If this is last_parm_insn, and any instructions were output
1801 after it to fix it up, then we must set last_parm_insn to
1802 the last such instruction emitted. */
1803 if (insn == last_parm_insn)
1804 last_parm_insn = PREV_INSN (next_insn);
1805
1806 while (replacements)
1807 {
1808 if (GET_CODE (replacements->new) == REG)
1809 {
1810 rtx insert_before;
1811 rtx seq;
1812
1813 /* OLD might be a (subreg (mem)). */
1814 if (GET_CODE (replacements->old) == SUBREG)
1815 replacements->old
1816 = fixup_memory_subreg (replacements->old, insn, 0);
1817 else
1818 replacements->old
1819 = fixup_stack_1 (replacements->old, insn);
1820
1821 insert_before = insn;
1822
1823 /* If we are changing the mode, do a conversion.
1824 This might be wasteful, but combine.c will
1825 eliminate much of the waste. */
1826
1827 if (GET_MODE (replacements->new)
1828 != GET_MODE (replacements->old))
1829 {
1830 start_sequence ();
1831 convert_move (replacements->new,
1832 replacements->old, unsignedp);
1833 seq = gen_sequence ();
1834 end_sequence ();
1835 }
1836 else
1837 seq = gen_move_insn (replacements->new,
1838 replacements->old);
1839
1840 emit_insn_before (seq, insert_before);
1841 }
1842
1843 replacements = replacements->next;
1844 }
1845 }
1846
1847 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1848 But don't touch other insns referred to by reg-notes;
1849 we will get them elsewhere. */
1850 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1851 if (GET_CODE (note) != INSN_LIST)
1852 XEXP (note, 0)
1853 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1854 }
1855 insn = next;
1856 }
1857 }
1858 \f
1859 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1860 See if the rtx expression at *LOC in INSN needs to be changed.
1861
1862 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1863 contain a list of original rtx's and replacements. If we find that we need
1864 to modify this insn by replacing a memory reference with a pseudo or by
1865 making a new MEM to implement a SUBREG, we consult that list to see if
1866 we have already chosen a replacement. If none has already been allocated,
1867 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1868 or the SUBREG, as appropriate, to the pseudo. */
1869
1870 static void
1871 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1872 register rtx var;
1873 enum machine_mode promoted_mode;
1874 register rtx *loc;
1875 rtx insn;
1876 struct fixup_replacement **replacements;
1877 {
1878 register int i;
1879 register rtx x = *loc;
1880 RTX_CODE code = GET_CODE (x);
1881 register char *fmt;
1882 register rtx tem, tem1;
1883 struct fixup_replacement *replacement;
1884
1885 switch (code)
1886 {
1887 case ADDRESSOF:
1888 if (XEXP (x, 0) == var)
1889 {
1890 /* Prevent sharing of rtl that might lose. */
1891 rtx sub = copy_rtx (XEXP (var, 0));
1892
1893 start_sequence ();
1894
1895 if (! validate_change (insn, loc, sub, 0))
1896 {
1897 rtx y = force_operand (sub, NULL_RTX);
1898
1899 if (! validate_change (insn, loc, y, 0))
1900 *loc = copy_to_reg (y);
1901 }
1902
1903 emit_insn_before (gen_sequence (), insn);
1904 end_sequence ();
1905 }
1906 return;
1907
1908 case MEM:
1909 if (var == x)
1910 {
1911 /* If we already have a replacement, use it. Otherwise,
1912 try to fix up this address in case it is invalid. */
1913
1914 replacement = find_fixup_replacement (replacements, var);
1915 if (replacement->new)
1916 {
1917 *loc = replacement->new;
1918 return;
1919 }
1920
1921 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1922
1923 /* Unless we are forcing memory to register or we changed the mode,
1924 we can leave things the way they are if the insn is valid. */
1925
1926 INSN_CODE (insn) = -1;
1927 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1928 && recog_memoized (insn) >= 0)
1929 return;
1930
1931 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1932 return;
1933 }
1934
1935 /* If X contains VAR, we need to unshare it here so that we update
1936 each occurrence separately. But all identical MEMs in one insn
1937 must be replaced with the same rtx because of the possibility of
1938 MATCH_DUPs. */
1939
1940 if (reg_mentioned_p (var, x))
1941 {
1942 replacement = find_fixup_replacement (replacements, x);
1943 if (replacement->new == 0)
1944 replacement->new = copy_most_rtx (x, var);
1945
1946 *loc = x = replacement->new;
1947 }
1948 break;
1949
1950 case REG:
1951 case CC0:
1952 case PC:
1953 case CONST_INT:
1954 case CONST:
1955 case SYMBOL_REF:
1956 case LABEL_REF:
1957 case CONST_DOUBLE:
1958 return;
1959
1960 case SIGN_EXTRACT:
1961 case ZERO_EXTRACT:
1962 /* Note that in some cases those types of expressions are altered
1963 by optimize_bit_field, and do not survive to get here. */
1964 if (XEXP (x, 0) == var
1965 || (GET_CODE (XEXP (x, 0)) == SUBREG
1966 && SUBREG_REG (XEXP (x, 0)) == var))
1967 {
1968 /* Get TEM as a valid MEM in the mode presently in the insn.
1969
1970 We don't worry about the possibility of MATCH_DUP here; it
1971 is highly unlikely and would be tricky to handle. */
1972
1973 tem = XEXP (x, 0);
1974 if (GET_CODE (tem) == SUBREG)
1975 {
1976 if (GET_MODE_BITSIZE (GET_MODE (tem))
1977 > GET_MODE_BITSIZE (GET_MODE (var)))
1978 {
1979 replacement = find_fixup_replacement (replacements, var);
1980 if (replacement->new == 0)
1981 replacement->new = gen_reg_rtx (GET_MODE (var));
1982 SUBREG_REG (tem) = replacement->new;
1983 }
1984 else
1985 tem = fixup_memory_subreg (tem, insn, 0);
1986 }
1987 else
1988 tem = fixup_stack_1 (tem, insn);
1989
1990 /* Unless we want to load from memory, get TEM into the proper mode
1991 for an extract from memory. This can only be done if the
1992 extract is at a constant position and length. */
1993
1994 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1995 && GET_CODE (XEXP (x, 2)) == CONST_INT
1996 && ! mode_dependent_address_p (XEXP (tem, 0))
1997 && ! MEM_VOLATILE_P (tem))
1998 {
1999 enum machine_mode wanted_mode = VOIDmode;
2000 enum machine_mode is_mode = GET_MODE (tem);
2001 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
2002
2003 #ifdef HAVE_extzv
2004 if (GET_CODE (x) == ZERO_EXTRACT)
2005 {
2006 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2007 if (wanted_mode == VOIDmode)
2008 wanted_mode = word_mode;
2009 }
2010 #endif
2011 #ifdef HAVE_extv
2012 if (GET_CODE (x) == SIGN_EXTRACT)
2013 {
2014 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2015 if (wanted_mode == VOIDmode)
2016 wanted_mode = word_mode;
2017 }
2018 #endif
2019 /* If the extraction insn wants a narrower mode, we can narrow the memory reference. */
2020 if (wanted_mode != VOIDmode
2021 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2022 {
2023 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2024 rtx old_pos = XEXP (x, 2);
2025 rtx newmem;
2026
2027 /* If the bytes and bits are counted differently, we
2028 must adjust the offset. */
2029 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2030 offset = (GET_MODE_SIZE (is_mode)
2031 - GET_MODE_SIZE (wanted_mode) - offset);
2032
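/* Worked example (hypothetical 32-bit target): for is_mode SImode
   (4 bytes), wanted_mode QImode (1 byte) and bit position 8, OFFSET
   starts as 8 / BITS_PER_UNIT = 1 and is remapped to 4 - 1 - 1 = 2,
   i.e. the same byte counted from the other end of the word.  */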
2033 pos %= GET_MODE_BITSIZE (wanted_mode);
2034
2035 newmem = gen_rtx_MEM (wanted_mode,
2036 plus_constant (XEXP (tem, 0), offset));
2037 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2038 MEM_COPY_ATTRIBUTES (newmem, tem);
2039
2040 /* Make the change and see if the insn remains valid. */
2041 INSN_CODE (insn) = -1;
2042 XEXP (x, 0) = newmem;
2043 XEXP (x, 2) = GEN_INT (pos);
2044
2045 if (recog_memoized (insn) >= 0)
2046 return;
2047
2048 /* Otherwise, restore old position. XEXP (x, 0) will be
2049 restored later. */
2050 XEXP (x, 2) = old_pos;
2051 }
2052 }
2053
2054 /* If we get here, the bitfield extract insn can't accept a memory
2055 reference. Copy the input into a register. */
2056
2057 tem1 = gen_reg_rtx (GET_MODE (tem));
2058 emit_insn_before (gen_move_insn (tem1, tem), insn);
2059 XEXP (x, 0) = tem1;
2060 return;
2061 }
2062 break;
2063
2064 case SUBREG:
2065 if (SUBREG_REG (x) == var)
2066 {
2067 /* If this is a special SUBREG made because VAR was promoted
2068 from a wider mode, replace it with VAR and call ourselves
2069 recursively, this time saying that the object previously
2070 had its current mode (by virtue of the SUBREG). */
2071
2072 if (SUBREG_PROMOTED_VAR_P (x))
2073 {
2074 *loc = var;
2075 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2076 return;
2077 }
2078
2079 /* If this SUBREG makes VAR wider, it has become a paradoxical
2080 SUBREG with VAR in memory, but these aren't allowed at this
2081 stage of the compilation. So load VAR into a pseudo and take
2082 a SUBREG of that pseudo. */
2083 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2084 {
2085 replacement = find_fixup_replacement (replacements, var);
2086 if (replacement->new == 0)
2087 replacement->new = gen_reg_rtx (GET_MODE (var));
2088 SUBREG_REG (x) = replacement->new;
2089 return;
2090 }
2091
2092 /* See if we have already found a replacement for this SUBREG.
2093 If so, use it. Otherwise, make a MEM and see if the insn
2094 is recognized. If not, or if we should force MEM into a register,
2095 make a pseudo for this SUBREG. */
2096 replacement = find_fixup_replacement (replacements, x);
2097 if (replacement->new)
2098 {
2099 *loc = replacement->new;
2100 return;
2101 }
2102
2103 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2104
2105 INSN_CODE (insn) = -1;
2106 if (! flag_force_mem && recog_memoized (insn) >= 0)
2107 return;
2108
2109 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2110 return;
2111 }
2112 break;
2113
2114 case SET:
2115 /* First do special simplification of bit-field references. */
2116 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2117 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2118 optimize_bit_field (x, insn, 0);
2119 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2120 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2121 optimize_bit_field (x, insn, NULL_PTR);
2122
2123 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2124 into a register and then store it back out. */
2125 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2126 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2127 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2128 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2129 > GET_MODE_SIZE (GET_MODE (var))))
2130 {
2131 replacement = find_fixup_replacement (replacements, var);
2132 if (replacement->new == 0)
2133 replacement->new = gen_reg_rtx (GET_MODE (var));
2134
2135 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2136 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2137 }
2138
2139 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2140 insn into a pseudo and store the low part of the pseudo into VAR. */
2141 if (GET_CODE (SET_DEST (x)) == SUBREG
2142 && SUBREG_REG (SET_DEST (x)) == var
2143 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2144 > GET_MODE_SIZE (GET_MODE (var))))
2145 {
2146 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2147 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2148 tem)),
2149 insn);
2150 break;
2151 }
2152
2153 {
2154 rtx dest = SET_DEST (x);
2155 rtx src = SET_SRC (x);
2156 #ifdef HAVE_insv
2157 rtx outerdest = dest;
2158 #endif
2159
2160 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2161 || GET_CODE (dest) == SIGN_EXTRACT
2162 || GET_CODE (dest) == ZERO_EXTRACT)
2163 dest = XEXP (dest, 0);
2164
2165 if (GET_CODE (src) == SUBREG)
2166 src = XEXP (src, 0);
2167
2168 /* If VAR does not appear at the top level of the SET
2169 just scan the lower levels of the tree. */
2170
2171 if (src != var && dest != var)
2172 break;
2173
2174 /* We will need to rerecognize this insn. */
2175 INSN_CODE (insn) = -1;
2176
2177 #ifdef HAVE_insv
2178 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2179 {
2180 /* Since this case will return, ensure we fixup all the
2181 operands here. */
2182 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2183 insn, replacements);
2184 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2185 insn, replacements);
2186 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2187 insn, replacements);
2188
2189 tem = XEXP (outerdest, 0);
2190
2191 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2192 that may appear inside a ZERO_EXTRACT.
2193 This was legitimate when the MEM was a REG. */
2194 if (GET_CODE (tem) == SUBREG
2195 && SUBREG_REG (tem) == var)
2196 tem = fixup_memory_subreg (tem, insn, 0);
2197 else
2198 tem = fixup_stack_1 (tem, insn);
2199
2200 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2201 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2202 && ! mode_dependent_address_p (XEXP (tem, 0))
2203 && ! MEM_VOLATILE_P (tem))
2204 {
2205 enum machine_mode wanted_mode;
2206 enum machine_mode is_mode = GET_MODE (tem);
2207 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2208
2209 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2210 if (wanted_mode == VOIDmode)
2211 wanted_mode = word_mode;
2212
2213 /* If the insertion insn wants a narrower mode, we can narrow the memory reference. */
2214 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2215 {
2216 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2217 rtx old_pos = XEXP (outerdest, 2);
2218 rtx newmem;
2219
2220 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2221 offset = (GET_MODE_SIZE (is_mode)
2222 - GET_MODE_SIZE (wanted_mode) - offset);
2223
2224 pos %= GET_MODE_BITSIZE (wanted_mode);
2225
2226 newmem = gen_rtx_MEM (wanted_mode,
2227 plus_constant (XEXP (tem, 0), offset));
2228 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2229 MEM_COPY_ATTRIBUTES (newmem, tem);
2230
2231 /* Make the change and see if the insn remains valid. */
2232 INSN_CODE (insn) = -1;
2233 XEXP (outerdest, 0) = newmem;
2234 XEXP (outerdest, 2) = GEN_INT (pos);
2235
2236 if (recog_memoized (insn) >= 0)
2237 return;
2238
2239 /* Otherwise, restore old position. XEXP (x, 0) will be
2240 restored later. */
2241 XEXP (outerdest, 2) = old_pos;
2242 }
2243 }
2244
2245 /* If we get here, the bit-field store doesn't allow memory
2246 or isn't located at a constant position. Load the value into
2247 a register, do the store, and put it back into memory. */
2248
2249 tem1 = gen_reg_rtx (GET_MODE (tem));
2250 emit_insn_before (gen_move_insn (tem1, tem), insn);
2251 emit_insn_after (gen_move_insn (tem, tem1), insn);
2252 XEXP (outerdest, 0) = tem1;
2253 return;
2254 }
2255 #endif
2256
2257 /* STRICT_LOW_PART is a no-op on memory references
2258 and it can cause combinations to be unrecognizable,
2259 so eliminate it. */
2260
2261 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2262 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2263
2264 /* A valid insn to copy VAR into or out of a register
2265 must be left alone, to avoid an infinite loop here.
2266 If the reference to VAR is by a subreg, fix that up,
2267 since SUBREG is not valid for a memref.
2268 Also fix up the address of the stack slot.
2269
2270 Note that we must not try to recognize the insn until
2271 after we know that we have valid addresses and no
2272 (subreg (mem ...) ...) constructs, since these interfere
2273 with determining the validity of the insn. */
2274
2275 if ((SET_SRC (x) == var
2276 || (GET_CODE (SET_SRC (x)) == SUBREG
2277 && SUBREG_REG (SET_SRC (x)) == var))
2278 && (GET_CODE (SET_DEST (x)) == REG
2279 || (GET_CODE (SET_DEST (x)) == SUBREG
2280 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2281 && GET_MODE (var) == promoted_mode
2282 && x == single_set (insn))
2283 {
2284 rtx pat;
2285
2286 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2287 if (replacement->new)
2288 SET_SRC (x) = replacement->new;
2289 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2290 SET_SRC (x) = replacement->new
2291 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2292 else
2293 SET_SRC (x) = replacement->new
2294 = fixup_stack_1 (SET_SRC (x), insn);
2295
2296 if (recog_memoized (insn) >= 0)
2297 return;
2298
2299 /* INSN is not valid, but we know that we want to
2300 copy SET_SRC (x) to SET_DEST (x) in some way. So
2301 we generate the move and see whether it requires more
2302 than one insn. If it does, we emit those insns and
2303 delete INSN. Otherwise, we can just replace the pattern
2304 of INSN; we have already verified above that INSN has
2305 no other function than to do X. */
2306
2307 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2308 if (GET_CODE (pat) == SEQUENCE)
2309 {
2310 emit_insn_after (pat, insn);
2311 PUT_CODE (insn, NOTE);
2312 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2313 NOTE_SOURCE_FILE (insn) = 0;
2314 }
2315 else
2316 PATTERN (insn) = pat;
2317
2318 return;
2319 }
2320
2321 if ((SET_DEST (x) == var
2322 || (GET_CODE (SET_DEST (x)) == SUBREG
2323 && SUBREG_REG (SET_DEST (x)) == var))
2324 && (GET_CODE (SET_SRC (x)) == REG
2325 || (GET_CODE (SET_SRC (x)) == SUBREG
2326 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2327 && GET_MODE (var) == promoted_mode
2328 && x == single_set (insn))
2329 {
2330 rtx pat;
2331
2332 if (GET_CODE (SET_DEST (x)) == SUBREG)
2333 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2334 else
2335 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2336
2337 if (recog_memoized (insn) >= 0)
2338 return;
2339
2340 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2341 if (GET_CODE (pat) == SEQUENCE)
2342 {
2343 emit_insn_after (pat, insn);
2344 PUT_CODE (insn, NOTE);
2345 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2346 NOTE_SOURCE_FILE (insn) = 0;
2347 }
2348 else
2349 PATTERN (insn) = pat;
2350
2351 return;
2352 }
2353
2354 /* Otherwise, storing into VAR must be handled specially
2355 by storing into a temporary and copying that into VAR
2356 with a new insn after this one. Note that this case
2357 will be used when storing into a promoted scalar since
2358 the insn will now have different modes on the input
2359 and output and hence will be invalid (except for the case
2360 of setting it to a constant, which does not need any
2361 change if it is valid). We generate extra code in that case,
2362 but combine.c will eliminate it. */
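/* Sketch: if VAR was promoted to SImode but now lives in a QImode
   stack slot, an insn such as (set VAR (plus:SI ...)) is rewritten as

       (set (reg:SI T) (plus:SI ...))
       (set VAR (subreg:QI (reg:SI T) 0))   ; emitted after INSN

   where T is a fresh pseudo in the promoted mode.  */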
2363
2364 if (dest == var)
2365 {
2366 rtx temp;
2367 rtx fixeddest = SET_DEST (x);
2368
2369 /* A STRICT_LOW_PART around a MEM can be discarded. */
2370 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2371 fixeddest = XEXP (fixeddest, 0);
2372 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2373 if (GET_CODE (fixeddest) == SUBREG)
2374 {
2375 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2376 promoted_mode = GET_MODE (fixeddest);
2377 }
2378 else
2379 fixeddest = fixup_stack_1 (fixeddest, insn);
2380
2381 temp = gen_reg_rtx (promoted_mode);
2382
2383 emit_insn_after (gen_move_insn (fixeddest,
2384 gen_lowpart (GET_MODE (fixeddest),
2385 temp)),
2386 insn);
2387
2388 SET_DEST (x) = temp;
2389 }
2390 }
2391
2392 default:
2393 break;
2394 }
2395
2396 /* Nothing special about this RTX; fix its operands. */
2397
2398 fmt = GET_RTX_FORMAT (code);
2399 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2400 {
2401 if (fmt[i] == 'e')
2402 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2403 if (fmt[i] == 'E')
2404 {
2405 register int j;
2406 for (j = 0; j < XVECLEN (x, i); j++)
2407 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2408 insn, replacements);
2409 }
2410 }
2411 }
2412 \f
2413 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2414 return an rtx (MEM:m1 newaddr) which is equivalent.
2415 If any insns must be emitted to compute NEWADDR, put them before INSN.
2416
2417 UNCRITICAL nonzero means accept paradoxical subregs.
2418 This is used for subregs found inside REG_NOTES. */
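/* For example (a sketch; little-endian target with 4-byte words):

       (subreg:SI (mem:DI (reg A)) 1)

   is rewritten as

       (mem:SI (plus (reg A) (const_int 4)))

   with any insns needed to compute the new address emitted before INSN.  */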
2419
2420 static rtx
2421 fixup_memory_subreg (x, insn, uncritical)
2422 rtx x;
2423 rtx insn;
2424 int uncritical;
2425 {
2426 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2427 rtx addr = XEXP (SUBREG_REG (x), 0);
2428 enum machine_mode mode = GET_MODE (x);
2429 rtx result;
2430
2431 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2432 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2433 && ! uncritical)
2434 abort ();
2435
2436 if (BYTES_BIG_ENDIAN)
2437 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2438 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2439 addr = plus_constant (addr, offset);
2440 if (!flag_force_addr && memory_address_p (mode, addr))
2441 /* Shortcut if no insns need be emitted. */
2442 return change_address (SUBREG_REG (x), mode, addr);
2443 start_sequence ();
2444 result = change_address (SUBREG_REG (x), mode, addr);
2445 emit_insn_before (gen_sequence (), insn);
2446 end_sequence ();
2447 return result;
2448 }
2449
2450 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2451 Replace subexpressions of X in place.
2452 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2453 Otherwise return X, with its contents possibly altered.
2454
2455 If any insns must be emitted to compute NEWADDR, put them before INSN.
2456
2457 UNCRITICAL is as in fixup_memory_subreg. */
2458
2459 static rtx
2460 walk_fixup_memory_subreg (x, insn, uncritical)
2461 register rtx x;
2462 rtx insn;
2463 int uncritical;
2464 {
2465 register enum rtx_code code;
2466 register char *fmt;
2467 register int i;
2468
2469 if (x == 0)
2470 return 0;
2471
2472 code = GET_CODE (x);
2473
2474 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2475 return fixup_memory_subreg (x, insn, uncritical);
2476
2477 /* Nothing special about this RTX; fix its operands. */
2478
2479 fmt = GET_RTX_FORMAT (code);
2480 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2481 {
2482 if (fmt[i] == 'e')
2483 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2484 if (fmt[i] == 'E')
2485 {
2486 register int j;
2487 for (j = 0; j < XVECLEN (x, i); j++)
2488 XVECEXP (x, i, j)
2489 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2490 }
2491 }
2492 return x;
2493 }
2494 \f
2495 /* For each memory ref within X, if it refers to a stack slot
2496 with an out of range displacement, put the address in a temp register
2497 (emitting new insns before INSN to load these registers)
2498 and alter the memory ref to use that register.
2499 Replace each such MEM rtx with a copy, to avoid clobberage. */
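/* For instance, if the displacement in

       (mem:SI (plus (reg fp) (const_int 70000)))

   is out of range for the target's addressing modes, the sum is
   computed into a fresh pseudo T beforehand and the reference becomes
   (mem:SI (reg T)).  (A sketch; the actual limit is target-specific.)  */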
2500
2501 static rtx
2502 fixup_stack_1 (x, insn)
2503 rtx x;
2504 rtx insn;
2505 {
2506 register int i;
2507 register RTX_CODE code = GET_CODE (x);
2508 register char *fmt;
2509
2510 if (code == MEM)
2511 {
2512 register rtx ad = XEXP (x, 0);
2513 /* If we have address of a stack slot but it's not valid
2514 (displacement is too large), compute the sum in a register. */
2515 if (GET_CODE (ad) == PLUS
2516 && GET_CODE (XEXP (ad, 0)) == REG
2517 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2518 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2519 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2520 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2521 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2522 #endif
2523 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2524 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2525 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2526 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2527 {
2528 rtx temp, seq;
2529 if (memory_address_p (GET_MODE (x), ad))
2530 return x;
2531
2532 start_sequence ();
2533 temp = copy_to_reg (ad);
2534 seq = gen_sequence ();
2535 end_sequence ();
2536 emit_insn_before (seq, insn);
2537 return change_address (x, VOIDmode, temp);
2538 }
2539 return x;
2540 }
2541
2542 fmt = GET_RTX_FORMAT (code);
2543 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2544 {
2545 if (fmt[i] == 'e')
2546 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2547 if (fmt[i] == 'E')
2548 {
2549 register int j;
2550 for (j = 0; j < XVECLEN (x, i); j++)
2551 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2552 }
2553 }
2554 return x;
2555 }
2556 \f
2557 /* Optimization: a bit-field instruction whose field
2558 happens to be a byte or halfword in memory
2559 can be changed to a move instruction.
2560
2561 We call here when INSN is an insn to examine or store into a bit-field.
2562 BODY is the SET-rtx to be altered.
2563
2564 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2565 (Currently this is called only from function.c, and EQUIV_MEM
2566 is always 0.) */
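/* As a sketch (byte numbering for a little-endian target): an aligned
   byte-sized extraction such as

       (set (reg:SI R) (zero_extract:SI (mem:SI addr) (const_int 8)
                                        (const_int 8)))

   is rewritten so the containing byte (mem:QI (plus addr (const_int 1)))
   is simply loaded, zero-extended into a fresh pseudo, and that pseudo
   used in place of the extraction.  */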
2567
2568 static void
2569 optimize_bit_field (body, insn, equiv_mem)
2570 rtx body;
2571 rtx insn;
2572 rtx *equiv_mem;
2573 {
2574 register rtx bitfield;
2575 int destflag;
2576 rtx seq = 0;
2577 enum machine_mode mode;
2578
2579 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2580 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2581 bitfield = SET_DEST (body), destflag = 1;
2582 else
2583 bitfield = SET_SRC (body), destflag = 0;
2584
2585 /* First check that the field being stored has constant size and position
2586 and is in fact a byte or halfword suitably aligned. */
2587
2588 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2589 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2590 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2591 != BLKmode)
2592 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2593 {
2594 register rtx memref = 0;
2595
2596 /* Now check that the containing word is memory, not a register,
2597 and that it is safe to change the machine mode. */
2598
2599 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2600 memref = XEXP (bitfield, 0);
2601 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2602 && equiv_mem != 0)
2603 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2604 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2605 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2606 memref = SUBREG_REG (XEXP (bitfield, 0));
2607 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2608 && equiv_mem != 0
2609 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2610 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2611
2612 if (memref
2613 && ! mode_dependent_address_p (XEXP (memref, 0))
2614 && ! MEM_VOLATILE_P (memref))
2615 {
2616 /* Now adjust the address, first for any subreg'ing
2617 that we are now getting rid of,
2618 and then for which byte of the word is wanted. */
2619
2620 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2621 rtx insns;
2622
2623 /* Adjust OFFSET to count bits from low-address byte. */
2624 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2625 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2626 - offset - INTVAL (XEXP (bitfield, 1)));
2627
2628 /* Adjust OFFSET to count bytes from low-address byte. */
2629 offset /= BITS_PER_UNIT;
2630 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2631 {
2632 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2633 if (BYTES_BIG_ENDIAN)
2634 offset -= (MIN (UNITS_PER_WORD,
2635 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2636 - MIN (UNITS_PER_WORD,
2637 GET_MODE_SIZE (GET_MODE (memref))));
2638 }
2639
2640 start_sequence ();
2641 memref = change_address (memref, mode,
2642 plus_constant (XEXP (memref, 0), offset));
2643 insns = get_insns ();
2644 end_sequence ();
2645 emit_insns_before (insns, insn);
2646
2647 /* Store this memory reference where
2648 we found the bit field reference. */
2649
2650 if (destflag)
2651 {
2652 validate_change (insn, &SET_DEST (body), memref, 1);
2653 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2654 {
2655 rtx src = SET_SRC (body);
2656 while (GET_CODE (src) == SUBREG
2657 && SUBREG_WORD (src) == 0)
2658 src = SUBREG_REG (src);
2659 if (GET_MODE (src) != GET_MODE (memref))
2660 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2661 validate_change (insn, &SET_SRC (body), src, 1);
2662 }
2663 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2664 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2665 /* This shouldn't happen because anything that didn't have
2666 one of these modes should have been converted explicitly
2667 and then referenced through a subreg.
2668 This is so because the original bit-field was
2669 handled by agg_mode and so its tree structure had
2670 the same mode that memref now has. */
2671 abort ();
2672 }
2673 else
2674 {
2675 rtx dest = SET_DEST (body);
2676
2677 while (GET_CODE (dest) == SUBREG
2678 && SUBREG_WORD (dest) == 0
2679 && (GET_MODE_CLASS (GET_MODE (dest))
2680 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2681 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2682 <= UNITS_PER_WORD))
2683 dest = SUBREG_REG (dest);
2684
2685 validate_change (insn, &SET_DEST (body), dest, 1);
2686
2687 if (GET_MODE (dest) == GET_MODE (memref))
2688 validate_change (insn, &SET_SRC (body), memref, 1);
2689 else
2690 {
2691 /* Convert the mem ref to the destination mode. */
2692 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2693
2694 start_sequence ();
2695 convert_move (newreg, memref,
2696 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2697 seq = get_insns ();
2698 end_sequence ();
2699
2700 validate_change (insn, &SET_SRC (body), newreg, 1);
2701 }
2702 }
2703
2704 /* See if we can convert this extraction or insertion into
2705 a simple move insn. We might not be able to do so if this
2706 was, for example, part of a PARALLEL.
2707
2708 If we succeed, write out any needed conversions. If we fail,
2709 it is hard to guess why we failed, so don't do anything
2710 special; just let the optimization be suppressed. */
2711
2712 if (apply_change_group () && seq)
2713 emit_insns_before (seq, insn);
2714 }
2715 }
2716 }
2717 \f
2718 /* These routines are responsible for converting virtual register references
2719 to the actual hard register references once RTL generation is complete.
2720
2721 The following five variables are used for communication between the
2722 routines. They contain the offsets of the virtual registers from their
2723 respective hard registers. */
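/* For example, a frame variable's address generated as

       (plus virtual_stack_vars_rtx (const_int 8))

   is instantiated (a sketch; every offset here is target-specific) as

       (plus frame_pointer_rtx (const_int (8 + var_offset)))  */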
2724
2725 static int in_arg_offset;
2726 static int var_offset;
2727 static int dynamic_offset;
2728 static int out_arg_offset;
2729 static int cfa_offset;
2730
2731 /* In most machines, the stack pointer register is equivalent to the bottom
2732 of the stack. */
2733
2734 #ifndef STACK_POINTER_OFFSET
2735 #define STACK_POINTER_OFFSET 0
2736 #endif
2737
2738 /* If not defined, pick an appropriate default for the offset of dynamically
2739 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2740 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2741
2742 #ifndef STACK_DYNAMIC_OFFSET
2743
2744 #ifdef ACCUMULATE_OUTGOING_ARGS
2745 /* The bottom of the stack points to the actual arguments. If
2746 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2747 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2748 stack space for register parameters is not pushed by the caller, but
2749 rather part of the fixed stack areas and hence not included in
2750 `current_function_outgoing_args_size'. Nevertheless, we must allow
2751 for it when allocating stack dynamic objects. */
2752
2753 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2754 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2755 (current_function_outgoing_args_size \
2756 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2757
2758 #else
2759 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2760 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2761 #endif
2762
2763 #else
2764 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2765 #endif
2766 #endif
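/* Illustrative instance of the default above: with
   ACCUMULATE_OUTGOING_ARGS, no REG_PARM_STACK_SPACE and a
   STACK_POINTER_OFFSET of 0, a function with 16 bytes of outgoing
   arguments gets a dynamic offset of 16, so dynamically allocated
   memory begins just above the outgoing argument block.  */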
2767
2768 /* On a few machines, the CFA coincides with the arg pointer. */
2769
2770 #ifndef ARG_POINTER_CFA_OFFSET
2771 #define ARG_POINTER_CFA_OFFSET 0
2772 #endif
2773
2774
2775 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2776 its address taken. DECL is the decl for the object stored in the
2777 register, for later use if we do need to force REG into the stack.
2778 REG is overwritten by the MEM like in put_reg_into_stack. */
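/* Sketch of the result: if REG was (reg:SI 42), it is rewritten in
   place as

       (mem:m (addressof:Pmode (reg:SI 99) 42))

   where m is DECL's mode, (reg:SI 99) is a fresh pseudo in REG's old
   mode, and 42 is remembered so that the original register's stack
   slot can be reused if the ADDRESSOF is later forced into the stack.  */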
2779
2780 rtx
2781 gen_mem_addressof (reg, decl)
2782 rtx reg;
2783 tree decl;
2784 {
2785 tree type = TREE_TYPE (decl);
2786 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2787 SET_ADDRESSOF_DECL (r, decl);
2788 /* If the original REG was a user-variable, then so is the REG whose
2789 address is being taken. */
2790 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2791
2792 XEXP (reg, 0) = r;
2793 PUT_CODE (reg, MEM);
2794 PUT_MODE (reg, DECL_MODE (decl));
2795 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2796 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2797 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2798
2799 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2800 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2801
2802 return reg;
2803 }
2804
2805 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2806
2807 void
2808 flush_addressof (decl)
2809 tree decl;
2810 {
2811 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2812 && DECL_RTL (decl) != 0
2813 && GET_CODE (DECL_RTL (decl)) == MEM
2814 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2815 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2816 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2817 }
2818
2819 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2820
2821 static void
2822 put_addressof_into_stack (r)
2823 rtx r;
2824 {
2825 tree decl = ADDRESSOF_DECL (r);
2826 rtx reg = XEXP (r, 0);
2827
2828 if (GET_CODE (reg) != REG)
2829 abort ();
2830
2831 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2832 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2833 ADDRESSOF_REGNO (r),
2834 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2835 }
2836
2837 /* List of replacements made below in purge_addressof_1 when creating
2838 bitfield insertions. */
2839 static rtx purge_addressof_replacements;
2840
2841 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2842 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2843 the stack. STORE is nonzero if *LOC is the destination of a store. */
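/* A sketch of the delicate case handled below: when the MEM and the
   register under the ADDRESSOF disagree in mode, e.g.

       (mem:QI (addressof:SI (reg:SI 99) 42))

   the reference cannot simply become (reg:SI 99); instead a bit-field
   extract (or, if STORE, a bit-field insert) on the register is
   emitted so the access behaves as the memory access would have.  */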
2844
2845 static void
2846 purge_addressof_1 (loc, insn, force, store)
2847 rtx *loc;
2848 rtx insn;
2849 int force, store;
2850 {
2851 rtx x;
2852 RTX_CODE code;
2853 int i, j;
2854 char *fmt;
2855
2856 /* Re-start here to avoid recursion in common cases. */
2857 restart:
2858
2859 x = *loc;
2860 if (x == 0)
2861 return;
2862
2863 code = GET_CODE (x);
2864
2865 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2866 {
2867 rtx insns;
2868 /* We must create a copy of the rtx because it was created by
2869 overwriting a REG rtx which is always shared. */
2870 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2871
2872 if (validate_change (insn, loc, sub, 0)
2873 || validate_replace_rtx (x, sub, insn))
2874 return;
2875
2876 start_sequence ();
2877 sub = force_operand (sub, NULL_RTX);
2878 if (! validate_change (insn, loc, sub, 0)
2879 && ! validate_replace_rtx (x, sub, insn))
2880 abort ();
2881
2882 insns = gen_sequence ();
2883 end_sequence ();
2884 emit_insn_before (insns, insn);
2885 return;
2886 }
2887 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2888 {
2889 rtx sub = XEXP (XEXP (x, 0), 0);
2890 rtx sub2;
2891
2892 if (GET_CODE (sub) == MEM)
2893 {
2894 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2895 MEM_COPY_ATTRIBUTES (sub2, sub);
2896 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2897 sub = sub2;
2898 }
2899
2900 if (GET_CODE (sub) == REG
2901 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2902 {
2903 put_addressof_into_stack (XEXP (x, 0));
2904 return;
2905 }
2906 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2907 {
2908 int size_x, size_sub;
2909
2910 if (!insn)
2911 {
2912 /* When processing REG_NOTES look at the list of
2913 replacements done on the insn to find the register that X
2914 was replaced by. */
2915 rtx tem;
2916
2917 for (tem = purge_addressof_replacements; tem != NULL_RTX;
2918 tem = XEXP (XEXP (tem, 1), 1))
2919 {
2920 rtx y = XEXP (tem, 0);
2921 if (GET_CODE (y) == MEM
2922 && rtx_equal_p (XEXP (x, 0), XEXP (y, 0)))
2923 {
2924 /* It can happen that the note may speak of things in
2925 a wider (or just different) mode than the code did.
2926 This is especially true of REG_RETVAL. */
2927
2928 rtx z = XEXP (XEXP (tem, 1), 0);
2929 if (GET_MODE (x) != GET_MODE (y))
2930 {
2931 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2932 z = SUBREG_REG (z);
2933
2934 /* ??? If we'd gotten into any of the really complex
2935 cases below, I'm not sure we can do a proper
2936 replacement. Might we be able to delete the
2937 note in some cases? */
2938 if (GET_MODE_SIZE (GET_MODE (x))
2939 < GET_MODE_SIZE (GET_MODE (y)))
2940 abort ();
2941
2942 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2943 && (GET_MODE_SIZE (GET_MODE (x))
2944 > GET_MODE_SIZE (GET_MODE (z))))
2945 {
2946 /* This can occur as a result of invalid
2947 pointer casts, e.g. float f; ...
2948 *(long long int *)&f.
2949 ??? We could emit a warning here, but
2950 without a line number that wouldn't be
2951 very helpful. */
2952 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2953 }
2954 else
2955 z = gen_lowpart (GET_MODE (x), z);
2956 }
2957
2958 *loc = z;
2959 return;
2960 }
2961 }
2962
2963 /* There should always be such a replacement. */
2964 abort ();
2965 }
2966
2967 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2968 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2969
2970 /* Don't even consider working with paradoxical subregs,
2971 or the moral equivalent seen here. */
2972 if (size_x <= size_sub
2973 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2974 {
2975 /* Do a bitfield insertion to mirror what would happen
2976 in memory. */
2977
2978 rtx val, seq;
2979
2980 if (store)
2981 {
2982 rtx p;
2983
2984 start_sequence ();
2985 val = gen_reg_rtx (GET_MODE (x));
2986 if (! validate_change (insn, loc, val, 0))
2987 {
2988 /* Discard the current sequence and put the
2989 ADDRESSOF on stack. */
2990 end_sequence ();
2991 goto give_up;
2992 }
2993 seq = gen_sequence ();
2994 end_sequence ();
2995 emit_insn_before (seq, insn);
2996
2997 start_sequence ();
2998 store_bit_field (sub, size_x, 0, GET_MODE (x),
2999 val, GET_MODE_SIZE (GET_MODE (sub)),
3000 GET_MODE_SIZE (GET_MODE (sub)));
3001
3002 /* Make sure to unshare any shared rtl that store_bit_field
3003 might have created. */
3004 for (p = get_insns(); p; p = NEXT_INSN (p))
3005 {
3006 reset_used_flags (PATTERN (p));
3007 reset_used_flags (REG_NOTES (p));
3008 reset_used_flags (LOG_LINKS (p));
3009 }
3010 unshare_all_rtl (get_insns ());
3011
3012 seq = gen_sequence ();
3013 end_sequence ();
3014 emit_insn_after (seq, insn);
3015 }
3016 else
3017 {
3018 start_sequence ();
3019 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3020 GET_MODE (x), GET_MODE (x),
3021 GET_MODE_SIZE (GET_MODE (sub)),
3022 GET_MODE_SIZE (GET_MODE (sub)));
3023
3024 if (! validate_change (insn, loc, val, 0))
3025 {
3026 /* Discard the current sequence and put the
3027 ADDRESSOF on stack. */
3028 end_sequence ();
3029 goto give_up;
3030 }
3031
3032 seq = gen_sequence ();
3033 end_sequence ();
3034 emit_insn_before (seq, insn);
3035 }
3036
3037 /* Remember the replacement so that the same one can be done
3038 on the REG_NOTES. */
3039 purge_addressof_replacements
3040 = gen_rtx_EXPR_LIST (VOIDmode, x,
3041 gen_rtx_EXPR_LIST (VOIDmode, val,
3042 purge_addressof_replacements));
3043
3044 /* We replaced with a reg -- all done. */
3045 return;
3046 }
3047 }
3048 else if (validate_change (insn, loc, sub, 0))
3049 {
3050 /* Remember the replacement so that the same one can be done
3051 on the REG_NOTES. */
3052 purge_addressof_replacements
3053 = gen_rtx_EXPR_LIST (VOIDmode, x,
3054 gen_rtx_EXPR_LIST (VOIDmode, sub,
3055 purge_addressof_replacements));
3056 goto restart;
3057 }
3058 give_up:;
3059 /* Else give up and put it into the stack. */
3060 }
3061 else if (code == ADDRESSOF)
3062 {
3063 put_addressof_into_stack (x);
3064 return;
3065 }
3066 else if (code == SET)
3067 {
3068 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
3069 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
3070 return;
3071 }
3072
3073 /* Scan all subexpressions. */
3074 fmt = GET_RTX_FORMAT (code);
3075 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3076 {
3077 if (*fmt == 'e')
3078 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
3079 else if (*fmt == 'E')
3080 for (j = 0; j < XVECLEN (x, i); j++)
3081 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
3082 }
3083 }
3084
3085 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3086 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3087 stack. */
3088
3089 void
3090 purge_addressof (insns)
3091 rtx insns;
3092 {
3093 rtx insn;
3094 for (insn = insns; insn; insn = NEXT_INSN (insn))
3095 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3096 || GET_CODE (insn) == CALL_INSN)
3097 {
3098 purge_addressof_1 (&PATTERN (insn), insn,
3099 asm_noperands (PATTERN (insn)) > 0, 0);
3100 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
3101 }
3102 purge_addressof_replacements = 0;
3103 }
3104 \f
3105 /* Pass through the INSNS of function FNDECL and convert virtual register
3106 references to hard register references. */
3107
3108 void
3109 instantiate_virtual_regs (fndecl, insns)
3110 tree fndecl;
3111 rtx insns;
3112 {
3113 rtx insn;
3114 int i;
3115
3116 /* Compute the offsets to use for this function. */
3117 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3118 var_offset = STARTING_FRAME_OFFSET;
3119 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3120 out_arg_offset = STACK_POINTER_OFFSET;
3121 cfa_offset = ARG_POINTER_CFA_OFFSET;
3122
3123 /* Scan all variables and parameters of this function. For each that is
3124 in memory, instantiate all virtual registers if the result is a valid
3125 address. If not, we do it later. That will handle most uses of virtual
3126 regs on many machines. */
3127 instantiate_decls (fndecl, 1);
3128
3129 /* Initialize recognition, indicating that volatile is OK. */
3130 init_recog ();
3131
3132 /* Scan through all the insns, instantiating every virtual register still
3133 present. */
3134 for (insn = insns; insn; insn = NEXT_INSN (insn))
3135 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3136 || GET_CODE (insn) == CALL_INSN)
3137 {
3138 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3139 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3140 }
3141
3142 /* Instantiate the stack slots for the parm registers, for later use in
3143 addressof elimination. */
3144 for (i = 0; i < max_parm_reg; ++i)
3145 if (parm_reg_stack_loc[i])
3146 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3147
3148 /* Now instantiate the remaining register equivalences for debugging info.
3149 These will not be valid addresses. */
3150 instantiate_decls (fndecl, 0);
3151
3152 /* Indicate that, from now on, assign_stack_local should use
3153 frame_pointer_rtx. */
3154 virtuals_instantiated = 1;
3155 }
3156
3157 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3158 all virtual registers in their DECL_RTL's.
3159
3160 If VALID_ONLY, do this only if the resulting address is still valid.
3161 Otherwise, always do it. */
3162
3163 static void
3164 instantiate_decls (fndecl, valid_only)
3165 tree fndecl;
3166 int valid_only;
3167 {
3168 tree decl;
3169
3170 if (DECL_SAVED_INSNS (fndecl))
3171 /* When compiling an inline function, the obstack used for
3172 rtl allocation is the maybepermanent_obstack. Calling
3173 `resume_temporary_allocation' switches us back to that
3174 obstack while we process this function's parameters. */
3175 resume_temporary_allocation ();
3176
3177 /* Process all parameters of the function. */
3178 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3179 {
3180 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3181
3182 instantiate_decl (DECL_RTL (decl), size, valid_only);
3183
3184 /* If the parameter was promoted, then the incoming RTL mode may be
3185 larger than the declared type size. We must use the larger of
3186 the two sizes. */
3187 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3188 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3189 }
3190
3191 /* Now process all variables defined in the function or its subblocks. */
3192 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3193
3194 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3195 {
3196 /* Save all rtl allocated for this function by raising the
3197 high-water mark on the maybepermanent_obstack. */
3198 preserve_data ();
3199 /* All further rtl allocation is now done in the current_obstack. */
3200 rtl_in_current_obstack ();
3201 }
3202 }
3203
3204 /* Subroutine of instantiate_decls: Process all decls in the given
3205 BLOCK node and all its subblocks. */
3206
3207 static void
3208 instantiate_decls_1 (let, valid_only)
3209 tree let;
3210 int valid_only;
3211 {
3212 tree t;
3213
3214 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3215 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3216 valid_only);
3217
3218 /* Process all subblocks. */
3219 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3220 instantiate_decls_1 (t, valid_only);
3221 }
3222
3223 /* Subroutine of the preceding procedures: Given RTL representing a
3224 decl and the size of the object, do any instantiation required.
3225
3226 If VALID_ONLY is non-zero, it means that the RTL should only be
3227 changed if the new address is valid. */
3228
3229 static void
3230 instantiate_decl (x, size, valid_only)
3231 rtx x;
3232 int size;
3233 int valid_only;
3234 {
3235 enum machine_mode mode;
3236 rtx addr;
3237
3238 /* If this is not a MEM, no need to do anything. Similarly if the
3239 address is a constant or a register that is not a virtual register. */
3240
3241 if (x == 0 || GET_CODE (x) != MEM)
3242 return;
3243
3244 addr = XEXP (x, 0);
3245 if (CONSTANT_P (addr)
3246 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3247 || (GET_CODE (addr) == REG
3248 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3249 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3250 return;
3251
3252 /* If we should only do this if the address is valid, copy the address.
3253 We need to do this so we can undo any changes that might make the
3254 address invalid. This copy is unfortunate, but probably can't be
3255 avoided. */
3256
3257 if (valid_only)
3258 addr = copy_rtx (addr);
3259
3260 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3261
3262 if (valid_only)
3263 {
3264 /* Now verify that the resulting address is valid for every integer or
3265 floating-point mode up to and including SIZE bytes long. We do this
3266 since the object might be accessed in any mode and frame addresses
3267 are shared. */
3268
3269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3270 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3271 mode = GET_MODE_WIDER_MODE (mode))
3272 if (! memory_address_p (mode, addr))
3273 return;
3274
3275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3276 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3277 mode = GET_MODE_WIDER_MODE (mode))
3278 if (! memory_address_p (mode, addr))
3279 return;
3280 }
3281
3282 /* Put back the address now that we have updated it and we either know
3283 it is valid or we don't care whether it is valid. */
3284
3285 XEXP (x, 0) = addr;
3286 }
3287 \f
3288 /* Given a pointer to a piece of rtx and an optional pointer to the
3289 containing object, instantiate any virtual registers present in it.
3290
3291 If EXTRA_INSNS, we always do the replacement and generate
3292 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3293 is not valid.
3294
3295 Return 1 if we either had nothing to do or if we were able to do the
3296 needed replacement. Return 0 otherwise; we only return zero if
3297 EXTRA_INSNS is zero.
3298
3299 We first try some simple transformations to avoid the creation of extra
3300 pseudos. */
3301
3302 static int
3303 instantiate_virtual_regs_1 (loc, object, extra_insns)
3304 rtx *loc;
3305 rtx object;
3306 int extra_insns;
3307 {
3308 rtx x;
3309 RTX_CODE code;
3310 rtx new = 0;
3311 HOST_WIDE_INT offset;
3312 rtx temp;
3313 rtx seq;
3314 int i, j;
3315 char *fmt;
3316
3317 /* Re-start here to avoid recursion in common cases. */
3318 restart:
3319
3320 x = *loc;
3321 if (x == 0)
3322 return 1;
3323
3324 code = GET_CODE (x);
3325
3326 /* Check for some special cases. */
3327 switch (code)
3328 {
3329 case CONST_INT:
3330 case CONST_DOUBLE:
3331 case CONST:
3332 case SYMBOL_REF:
3333 case CODE_LABEL:
3334 case PC:
3335 case CC0:
3336 case ASM_INPUT:
3337 case ADDR_VEC:
3338 case ADDR_DIFF_VEC:
3339 case RETURN:
3340 return 1;
3341
3342 case SET:
3343 /* We are allowed to set the virtual registers. This means that
3344 the actual register should receive the source minus the
3345 appropriate offset. This is used, for example, in the handling
3346 of non-local gotos. */
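/* E.g. (a sketch) (set virtual_stack_vars_rtx (reg X)) becomes

       (set frame_pointer_rtx (plus (reg X) (const_int -var_offset)))

   so that addresses later formed from the virtual register still
   resolve to the same location.  */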
3347 if (SET_DEST (x) == virtual_incoming_args_rtx)
3348 new = arg_pointer_rtx, offset = - in_arg_offset;
3349 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3350 new = frame_pointer_rtx, offset = - var_offset;
3351 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3352 new = stack_pointer_rtx, offset = - dynamic_offset;
3353 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3354 new = stack_pointer_rtx, offset = - out_arg_offset;
3355 else if (SET_DEST (x) == virtual_cfa_rtx)
3356 new = arg_pointer_rtx, offset = - cfa_offset;
3357
3358 if (new)
3359 {
3360 /* The only valid sources here are PLUS or REG. Just do
3361 the simplest possible thing to handle them. */
3362 if (GET_CODE (SET_SRC (x)) != REG
3363 && GET_CODE (SET_SRC (x)) != PLUS)
3364 abort ();
3365
3366 start_sequence ();
3367 if (GET_CODE (SET_SRC (x)) != REG)
3368 temp = force_operand (SET_SRC (x), NULL_RTX);
3369 else
3370 temp = SET_SRC (x);
3371 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3372 seq = get_insns ();
3373 end_sequence ();
3374
3375 emit_insns_before (seq, object);
3376 SET_DEST (x) = new;
3377
3378 if (! validate_change (object, &SET_SRC (x), temp, 0)
3379 || ! extra_insns)
3380 abort ();
3381
3382 return 1;
3383 }
3384
3385 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3386 loc = &SET_SRC (x);
3387 goto restart;
3388
3389 case PLUS:
3390 /* Handle special case of virtual register plus constant. */
3391 if (CONSTANT_P (XEXP (x, 1)))
3392 {
3393 rtx old, new_offset;
3394
3395 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3396 if (GET_CODE (XEXP (x, 0)) == PLUS)
3397 {
3398 rtx inner = XEXP (XEXP (x, 0), 0);
3399
3400 if (inner == virtual_incoming_args_rtx)
3401 new = arg_pointer_rtx, offset = in_arg_offset;
3402 else if (inner == virtual_stack_vars_rtx)
3403 new = frame_pointer_rtx, offset = var_offset;
3404 else if (inner == virtual_stack_dynamic_rtx)
3405 new = stack_pointer_rtx, offset = dynamic_offset;
3406 else if (inner == virtual_outgoing_args_rtx)
3407 new = stack_pointer_rtx, offset = out_arg_offset;
3408 else if (inner == virtual_cfa_rtx)
3409 new = arg_pointer_rtx, offset = cfa_offset;
3410 else
3411 {
3412 loc = &XEXP (x, 0);
3413 goto restart;
3414 }
3415
3416 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3417 extra_insns);
3418 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3419 }
3420
3421 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3422 new = arg_pointer_rtx, offset = in_arg_offset;
3423 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3424 new = frame_pointer_rtx, offset = var_offset;
3425 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3426 new = stack_pointer_rtx, offset = dynamic_offset;
3427 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3428 new = stack_pointer_rtx, offset = out_arg_offset;
3429 else if (XEXP (x, 0) == virtual_cfa_rtx)
3430 new = arg_pointer_rtx, offset = cfa_offset;
3431 else
3432 {
3433 /* We know the second operand is a constant. Unless the
3434 first operand is a REG (which has already been checked),
3435 it still needs to be checked. */
3436 if (GET_CODE (XEXP (x, 0)) != REG)
3437 {
3438 loc = &XEXP (x, 0);
3439 goto restart;
3440 }
3441 return 1;
3442 }
3443
3444 new_offset = plus_constant (XEXP (x, 1), offset);
3445
3446 /* If the new constant is zero, try to replace the sum with just
3447 the register. */
3448 if (new_offset == const0_rtx
3449 && validate_change (object, loc, new, 0))
3450 return 1;
3451
3452 /* Next try to replace the register and new offset.
3453 There are two changes to validate here and we can't assume that
3454 in the case where the old offset equals the new one, just changing
3455 the register will yield a valid insn. In the interests of a little
3456 efficiency, however, we only call validate_change once (we don't
3457 queue up the changes and then call apply_change_group). */
3458
3459 old = XEXP (x, 0);
3460 if (offset == 0
3461 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3462 : (XEXP (x, 0) = new,
3463 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3464 {
3465 if (! extra_insns)
3466 {
3467 XEXP (x, 0) = old;
3468 return 0;
3469 }
3470
3471 /* Otherwise copy the new constant into a register and replace
3472 the constant with that register. */
3473 temp = gen_reg_rtx (Pmode);
3474 XEXP (x, 0) = new;
3475 if (validate_change (object, &XEXP (x, 1), temp, 0))
3476 emit_insn_before (gen_move_insn (temp, new_offset), object);
3477 else
3478 {
3479 /* If that didn't work, replace this expression with a
3480 register containing the sum. */
3481
3482 XEXP (x, 0) = old;
3483 new = gen_rtx_PLUS (Pmode, new, new_offset);
3484
3485 start_sequence ();
3486 temp = force_operand (new, NULL_RTX);
3487 seq = get_insns ();
3488 end_sequence ();
3489
3490 emit_insns_before (seq, object);
3491 if (! validate_change (object, loc, temp, 0)
3492 && ! validate_replace_rtx (x, temp, object))
3493 abort ();
3494 }
3495 }
3496
3497 return 1;
3498 }
3499
3500 /* Fall through to generic two-operand expression case. */
3501 case EXPR_LIST:
3502 case CALL:
3503 case COMPARE:
3504 case MINUS:
3505 case MULT:
3506 case DIV: case UDIV:
3507 case MOD: case UMOD:
3508 case AND: case IOR: case XOR:
3509 case ROTATERT: case ROTATE:
3510 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3511 case NE: case EQ:
3512 case GE: case GT: case GEU: case GTU:
3513 case LE: case LT: case LEU: case LTU:
3514 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3515 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3516 loc = &XEXP (x, 0);
3517 goto restart;
3518
3519 case MEM:
3520 /* Most cases of MEM that convert to valid addresses have already been
3521 handled by our scan of decls. The only special handling we
3522 need here is to make a copy of the rtx to ensure it isn't being
3523 shared if we have to change it to a pseudo.
3524
3525 If the rtx is a simple reference to an address via a virtual register,
3526 it can potentially be shared. In such cases, first try to make it
3527 a valid address, which can also be shared. Otherwise, copy it and
3528 proceed normally.
3529
3530 First check for common cases that need no processing. These are
3531 usually due to instantiation already being done on a previous instance
3532 of a shared rtx. */
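      /* For illustration: a shared address such as
	   (mem (plus (reg virtual-stack-vars) (const_int 4)))
	 is substituted in place when the resulting address is valid, so
	 every insn sharing the rtx sees the one substitution.  */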
3533
3534 temp = XEXP (x, 0);
3535 if (CONSTANT_ADDRESS_P (temp)
3536 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3537 || temp == arg_pointer_rtx
3538 #endif
3539 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3540 || temp == hard_frame_pointer_rtx
3541 #endif
3542 || temp == frame_pointer_rtx)
3543 return 1;
3544
3545 if (GET_CODE (temp) == PLUS
3546 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3547 && (XEXP (temp, 0) == frame_pointer_rtx
3548 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3549 || XEXP (temp, 0) == hard_frame_pointer_rtx
3550 #endif
3551 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3552 || XEXP (temp, 0) == arg_pointer_rtx
3553 #endif
3554 ))
3555 return 1;
3556
3557 if (temp == virtual_stack_vars_rtx
3558 || temp == virtual_incoming_args_rtx
3559 || (GET_CODE (temp) == PLUS
3560 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3561 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3562 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3563 {
3564 /* This MEM may be shared. If the substitution can be done without
3565 the need to generate new pseudos, we want to do it in place
3566 so all copies of the shared rtx benefit. The call below will
3567 only make substitutions if the resulting address is still
3568 valid.
3569
3570 Note that we cannot pass X as the object in the recursive call
3571 since the insn being processed may not allow all valid
3572 addresses. However, if we were not passed an object, we can
3573 only modify X without copying it if X will have a valid
3574 address.
3575
3576 ??? Also note that this can still lose if OBJECT is an insn that
3577 has fewer restrictions on an address than some other insn.
3578 In that case, we will modify the shared address. This case
3579 doesn't seem very likely, though. One case where this could
3580 happen is in the case of a USE or CLOBBER reference, but we
3581 take care of that below. */
3582
3583 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3584 object ? object : x, 0))
3585 return 1;
3586
3587 /* Otherwise make a copy and process that copy. We copy the entire
3588 RTL expression since it might be a PLUS which could also be
3589 shared. */
3590 *loc = x = copy_rtx (x);
3591 }
3592
3593 /* Fall through to generic unary operation case. */
3594 case SUBREG:
3595 case STRICT_LOW_PART:
3596 case NEG: case NOT:
3597 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3598 case SIGN_EXTEND: case ZERO_EXTEND:
3599 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3600 case FLOAT: case FIX:
3601 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3602 case ABS:
3603 case SQRT:
3604 case FFS:
3605 /* These cases either have just one operand or we know that we need not
3606 check the rest of the operands. */
3607 loc = &XEXP (x, 0);
3608 goto restart;
3609
3610 case USE:
3611 case CLOBBER:
3612 /* If the operand is a MEM, see if the change results in a valid MEM. If not,
3613 go ahead and make the invalid change, but apply it to a copy. For a REG,
3614 just make the recursive call, since there's no chance of a problem. */
3615
3616 if ((GET_CODE (XEXP (x, 0)) == MEM
3617 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3618 0))
3619 || (GET_CODE (XEXP (x, 0)) == REG
3620 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3621 return 1;
3622
3623 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3624 loc = &XEXP (x, 0);
3625 goto restart;
3626
3627 case REG:
3628 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3629 in front of this insn and substitute the temporary. */
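      /* For illustration only (offsets are target-dependent): with
	 in_arg_offset == 8, a bare
	   (reg virtual-incoming-args)
	 becomes
	   (plus (reg arg-pointer) (const_int 8)),
	 or, if OBJECT rejects that form, a pseudo loaded with the sum
	 just before OBJECT.  */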
3630 if (x == virtual_incoming_args_rtx)
3631 new = arg_pointer_rtx, offset = in_arg_offset;
3632 else if (x == virtual_stack_vars_rtx)
3633 new = frame_pointer_rtx, offset = var_offset;
3634 else if (x == virtual_stack_dynamic_rtx)
3635 new = stack_pointer_rtx, offset = dynamic_offset;
3636 else if (x == virtual_outgoing_args_rtx)
3637 new = stack_pointer_rtx, offset = out_arg_offset;
3638 else if (x == virtual_cfa_rtx)
3639 new = arg_pointer_rtx, offset = cfa_offset;
3640
3641 if (new)
3642 {
3643 temp = plus_constant (new, offset);
3644 if (!validate_change (object, loc, temp, 0))
3645 {
3646 if (! extra_insns)
3647 return 0;
3648
3649 start_sequence ();
3650 temp = force_operand (temp, NULL_RTX);
3651 seq = get_insns ();
3652 end_sequence ();
3653
3654 emit_insns_before (seq, object);
3655 if (! validate_change (object, loc, temp, 0)
3656 && ! validate_replace_rtx (x, temp, object))
3657 abort ();
3658 }
3659 }
3660
3661 return 1;
3662
3663 case ADDRESSOF:
3664 if (GET_CODE (XEXP (x, 0)) == REG)
3665 return 1;
3666
3667 else if (GET_CODE (XEXP (x, 0)) == MEM)
3668 {
3669 /* If we have an (addressof (mem ..)), do any instantiation inside
3670 since we know we'll be making the inside valid when we finally
3671 remove the ADDRESSOF. */
3672 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3673 return 1;
3674 }
3675 break;
3676
3677 default:
3678 break;
3679 }
3680
3681 /* Scan all subexpressions. */
3682 fmt = GET_RTX_FORMAT (code);
3683 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3684 if (*fmt == 'e')
3685 {
3686 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3687 return 0;
3688 }
3689 else if (*fmt == 'E')
3690 for (j = 0; j < XVECLEN (x, i); j++)
3691 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3692 extra_insns))
3693 return 0;
3694
3695 return 1;
3696 }
3697 \f
3698 /* Optimization: assuming this function does not receive nonlocal gotos,
3699 delete the handlers for such, as well as the insns to establish
3700 and disestablish them. */
3701
3702 static void
3703 delete_handlers ()
3704 {
3705 rtx insn;
3706 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3707 {
3708 /* Delete the handler by turning off the flag that would
3709 prevent jump_optimize from deleting it.
3710 Also permit deletion of the nonlocal labels themselves
3711 if nothing local refers to them. */
3712 if (GET_CODE (insn) == CODE_LABEL)
3713 {
3714 tree t, last_t;
3715
3716 LABEL_PRESERVE_P (insn) = 0;
3717
3718 /* Remove it from the nonlocal_label list, to avoid confusing
3719 flow. */
3720 for (t = nonlocal_labels, last_t = 0; t;
3721 last_t = t, t = TREE_CHAIN (t))
3722 if (DECL_RTL (TREE_VALUE (t)) == insn)
3723 break;
3724 if (t)
3725 {
3726 if (! last_t)
3727 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3728 else
3729 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3730 }
3731 }
3732 if (GET_CODE (insn) == INSN)
3733 {
3734 int can_delete = 0;
3735 rtx t;
3736 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3737 if (reg_mentioned_p (t, PATTERN (insn)))
3738 {
3739 can_delete = 1;
3740 break;
3741 }
3742 if (can_delete
3743 || (nonlocal_goto_stack_level != 0
3744 && reg_mentioned_p (nonlocal_goto_stack_level,
3745 PATTERN (insn))))
3746 delete_insn (insn);
3747 }
3748 }
3749 }
3750
3751 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3752 of the current function. */
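/* For illustration: with two nonlocal labels L1 and L2, the result is
   schematically
     (expr_list L2 (expr_list L1 nil))
   where each list value is the label's rtx as returned by label_rtx.  */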
3753
3754 rtx
3755 nonlocal_label_rtx_list ()
3756 {
3757 tree t;
3758 rtx x = 0;
3759
3760 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3761 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3762
3763 return x;
3764 }
3765 \f
3766 /* Output a USE for any register use in RTL.
3767 This is used with -noreg to mark the extent of the lifespan
3768 of any registers used in a user-visible variable's DECL_RTL. */
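/* For illustration: for a variable living in pseudo 66 this emits
     (use (reg 66))
   so that flow analysis considers the register live that far.  */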
3769
3770 void
3771 use_variable (rtl)
3772 rtx rtl;
3773 {
3774 if (GET_CODE (rtl) == REG)
3775 /* This is a register variable. */
3776 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3777 else if (GET_CODE (rtl) == MEM
3778 && GET_CODE (XEXP (rtl, 0)) == REG
3779 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3780 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3781 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3782 /* This is a variable-sized structure. */
3783 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3784 }
3785
3786 /* Like use_variable except that it outputs the USEs after INSN
3787 instead of at the end of the insn-chain. */
3788
3789 void
3790 use_variable_after (rtl, insn)
3791 rtx rtl, insn;
3792 {
3793 if (GET_CODE (rtl) == REG)
3794 /* This is a register variable. */
3795 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3796 else if (GET_CODE (rtl) == MEM
3797 && GET_CODE (XEXP (rtl, 0)) == REG
3798 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3799 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3800 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3801 /* This is a variable-sized structure. */
3802 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3803 }
3804 \f
3805 int
3806 max_parm_reg_num ()
3807 {
3808 return max_parm_reg;
3809 }
3810
3811 /* Return the first insn following those generated by `assign_parms'. */
3812
3813 rtx
3814 get_first_nonparm_insn ()
3815 {
3816 if (last_parm_insn)
3817 return NEXT_INSN (last_parm_insn);
3818 return get_insns ();
3819 }
3820
3821 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3822 Crash if there is none. */
3823
3824 rtx
3825 get_first_block_beg ()
3826 {
3827 register rtx searcher;
3828 register rtx insn = get_first_nonparm_insn ();
3829
3830 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3831 if (GET_CODE (searcher) == NOTE
3832 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3833 return searcher;
3834
3835 abort (); /* Invalid call to this function. (See comments above.) */
3836 return NULL_RTX;
3837 }
3838
3839 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3840 This means a type for which function calls must pass an address to the
3841 function or get an address back from the function.
3842 EXP may be a type node or an expression (whose type is tested). */
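/* For illustration (the outcome is target-dependent): a type rejected by
   RETURN_IN_MEMORY, a TREE_ADDRESSABLE type, or any aggregate under
   -fpcc-struct-return yields 1; a scalar whose hard_function_value fits
   entirely in call-clobbered registers yields 0.  */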
3843
3844 int
3845 aggregate_value_p (exp)
3846 tree exp;
3847 {
3848 int i, regno, nregs;
3849 rtx reg;
3850 tree type;
3851 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3852 type = exp;
3853 else
3854 type = TREE_TYPE (exp);
3855
3856 if (RETURN_IN_MEMORY (type))
3857 return 1;
3858 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3859 and thus can't be returned in registers. */
3860 if (TREE_ADDRESSABLE (type))
3861 return 1;
3862 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3863 return 1;
3864 /* Make sure we have suitable call-clobbered regs to return
3865 the value in; if not, we must return it in memory. */
3866 reg = hard_function_value (type, 0);
3867
3868 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3869 it is OK. */
3870 if (GET_CODE (reg) != REG)
3871 return 0;
3872
3873 regno = REGNO (reg);
3874 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3875 for (i = 0; i < nregs; i++)
3876 if (! call_used_regs[regno + i])
3877 return 1;
3878 return 0;
3879 }
3880 \f
3881 /* Assign RTL expressions to the function's parameters.
3882 This may involve copying them into registers and using
3883 those registers as the RTL for them.
3884
3885 If SECOND_TIME is non-zero it means that this function is being
3886 called a second time. This is done by integrate.c when a function's
3887 compilation is deferred. We need to come back here in case the
3888 FUNCTION_ARG macro computes items needed for the rest of the compilation
3889 (such as changing which registers are fixed or caller-saved). But suppress
3890 writing any insns or setting DECL_RTL of anything in this case. */
3891
3892 void
3893 assign_parms (fndecl, second_time)
3894 tree fndecl;
3895 int second_time;
3896 {
3897 register tree parm;
3898 register rtx entry_parm = 0;
3899 register rtx stack_parm = 0;
3900 CUMULATIVE_ARGS args_so_far;
3901 enum machine_mode promoted_mode, passed_mode;
3902 enum machine_mode nominal_mode, promoted_nominal_mode;
3903 int unsignedp;
3904 /* Total space needed so far for args on the stack,
3905 given as a constant and a tree-expression. */
3906 struct args_size stack_args_size;
3907 tree fntype = TREE_TYPE (fndecl);
3908 tree fnargs = DECL_ARGUMENTS (fndecl);
3909 /* This is used for the arg pointer when referring to stack args. */
3910 rtx internal_arg_pointer;
3911 /* This is a dummy PARM_DECL that we use for the function result if
3912 the function returns a structure. */
3913 tree function_result_decl = 0;
3914 int varargs_setup = 0;
3915 rtx conversion_insns = 0;
3916
3917 /* Nonzero if the last arg is named `__builtin_va_alist',
3918 which is used on some machines for old-fashioned non-ANSI varargs.h;
3919 this should be stuck onto the stack as if it had arrived there. */
3920 int hide_last_arg
3921 = (current_function_varargs
3922 && fnargs
3923 && (parm = tree_last (fnargs)) != 0
3924 && DECL_NAME (parm)
3925 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3926 "__builtin_va_alist")));
3927
3928 /* Nonzero if function takes extra anonymous args.
3929 This means the last named arg must be on the stack
3930 right before the anonymous ones. */
3931 int stdarg
3932 = (TYPE_ARG_TYPES (fntype) != 0
3933 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3934 != void_type_node));
3935
3936 current_function_stdarg = stdarg;
3937
3938 /* If the reg that the virtual arg pointer will be translated into is
3939 not a fixed reg or is the stack pointer, make a copy of the virtual
3940 arg pointer, and address parms via the copy. The frame pointer is
3941 considered fixed even though it is not marked as such.
3942
3943 The second time through, simply use ap to avoid generating rtx. */
3944
3945 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3946 || ! (fixed_regs[ARG_POINTER_REGNUM]
3947 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3948 && ! second_time)
3949 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3950 else
3951 internal_arg_pointer = virtual_incoming_args_rtx;
3952 current_function_internal_arg_pointer = internal_arg_pointer;
3953
3954 stack_args_size.constant = 0;
3955 stack_args_size.var = 0;
3956
3957 /* If struct value address is treated as the first argument, make it so. */
3958 if (aggregate_value_p (DECL_RESULT (fndecl))
3959 && ! current_function_returns_pcc_struct
3960 && struct_value_incoming_rtx == 0)
3961 {
3962 tree type = build_pointer_type (TREE_TYPE (fntype));
3963
3964 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3965
3966 DECL_ARG_TYPE (function_result_decl) = type;
3967 TREE_CHAIN (function_result_decl) = fnargs;
3968 fnargs = function_result_decl;
3969 }
3970
3971 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3972 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3973 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3974
3975 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3976 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3977 #else
3978 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3979 #endif
3980
3981 /* We haven't yet found an argument that we must push and pretend the
3982 caller did. */
3983 current_function_pretend_args_size = 0;
3984
3985 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3986 {
3987 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3988 struct args_size stack_offset;
3989 struct args_size arg_size;
3990 int passed_pointer = 0;
3991 int did_conversion = 0;
3992 tree passed_type = DECL_ARG_TYPE (parm);
3993 tree nominal_type = TREE_TYPE (parm);
3994
3995 /* Set LAST_NAMED if this is the last named arg before some
3996 anonymous args. */
3997 int last_named = ((TREE_CHAIN (parm) == 0
3998 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3999 && (stdarg || current_function_varargs));
4000 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4001 most machines, if this is a varargs/stdarg function, then we treat
4002 the last named arg as if it were anonymous too. */
4003 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4004
4005 if (TREE_TYPE (parm) == error_mark_node
4006 /* This can happen after weird syntax errors
4007 or if an enum type is defined among the parms. */
4008 || TREE_CODE (parm) != PARM_DECL
4009 || passed_type == NULL)
4010 {
4011 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4012 = gen_rtx_MEM (BLKmode, const0_rtx);
4013 TREE_USED (parm) = 1;
4014 continue;
4015 }
4016
4017 /* For a varargs.h function, save info about regs and stack space
4018 used by the individual args, not including the va_alist arg. */
4019 if (hide_last_arg && last_named)
4020 current_function_args_info = args_so_far;
4021
4022 /* Find mode of arg as it is passed, and mode of arg
4023 as it should be during execution of this function. */
4024 passed_mode = TYPE_MODE (passed_type);
4025 nominal_mode = TYPE_MODE (nominal_type);
4026
4027 /* If the parm's mode is VOID, its value doesn't matter,
4028 so avoid the usual things like emit_move_insn that could crash. */
4029 if (nominal_mode == VOIDmode)
4030 {
4031 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4032 continue;
4033 }
4034
4035 /* If the parm is to be passed as a transparent union, use the
4036 type of the first field for the tests below. We have already
4037 verified that the modes are the same. */
4038 if (DECL_TRANSPARENT_UNION (parm)
4039 || TYPE_TRANSPARENT_UNION (passed_type))
4040 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4041
4042 /* See if this arg was passed by invisible reference. It is if
4043 it is an object whose size depends on the contents of the
4044 object itself or if the machine requires these objects be passed
4045 that way. */
4046
4047 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4048 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4049 || TREE_ADDRESSABLE (passed_type)
4050 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4051 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4052 passed_type, named_arg)
4053 #endif
4054 )
4055 {
4056 passed_type = nominal_type = build_pointer_type (passed_type);
4057 passed_pointer = 1;
4058 passed_mode = nominal_mode = Pmode;
4059 }
4060
4061 promoted_mode = passed_mode;
4062
4063 #ifdef PROMOTE_FUNCTION_ARGS
4064 /* Compute the mode to which the arg is actually extended. */
4065 unsignedp = TREE_UNSIGNED (passed_type);
4066 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4067 #endif
4068
4069 /* Let machine desc say which reg (if any) the parm arrives in.
4070 0 means it arrives on the stack. */
4071 #ifdef FUNCTION_INCOMING_ARG
4072 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4073 passed_type, named_arg);
4074 #else
4075 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4076 passed_type, named_arg);
4077 #endif
4078
4079 if (entry_parm == 0)
4080 promoted_mode = passed_mode;
4081
4082 #ifdef SETUP_INCOMING_VARARGS
4083 /* If this is the last named parameter, do any required setup for
4084 varargs or stdargs. We need to know about the case of this being an
4085 addressable type, in which case we skip the registers it
4086 would have arrived in.
4087
4088 For stdargs, LAST_NAMED will be set for two parameters, the one that
4089 is actually the last named, and the dummy parameter. We only
4090 want to do this action once.
4091
4092 Also, indicate when RTL generation is to be suppressed. */
4093 if (last_named && !varargs_setup)
4094 {
4095 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4096 current_function_pretend_args_size,
4097 second_time);
4098 varargs_setup = 1;
4099 }
4100 #endif
4101
4102 /* Determine parm's home in the stack,
4103 in case it arrives in the stack or we should pretend it did.
4104
4105 Compute the stack position and rtx where the argument arrives
4106 and its size.
4107
4108 There is one complexity here: If this was a parameter that would
4109 have been passed in registers, but wasn't only because it is
4110 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4111 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4112 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4113 0 as it was the previous time. */
4114
4115 locate_and_pad_parm (nominal_mode, passed_type,
4116 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4117 1,
4118 #else
4119 #ifdef FUNCTION_INCOMING_ARG
4120 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4121 passed_type,
4122 (named_arg
4123 || varargs_setup)) != 0,
4124 #else
4125 FUNCTION_ARG (args_so_far, promoted_mode,
4126 passed_type,
4127 named_arg || varargs_setup) != 0,
4128 #endif
4129 #endif
4130 fndecl, &stack_args_size, &stack_offset, &arg_size);
4131
4132 if (! second_time)
4133 {
4134 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4135
4136 if (offset_rtx == const0_rtx)
4137 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4138 else
4139 stack_parm = gen_rtx_MEM (nominal_mode,
4140 gen_rtx_PLUS (Pmode,
4141 internal_arg_pointer,
4142 offset_rtx));
4143
4144 /* If this is a memory ref that contains aggregate components,
4145 mark it as such for cse and loop optimize. Likewise if it
4146 is readonly. */
4147 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4148 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4149 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4150 }
4151
4152 /* If this parameter was passed both in registers and in the stack,
4153 use the copy on the stack. */
4154 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4155 entry_parm = 0;
4156
4157 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4158 /* If this parm was passed part in regs and part in memory,
4159 pretend it arrived entirely in memory
4160 by pushing the register-part onto the stack.
4161
4162 In the special case of a DImode or DFmode that is split,
4163 we could put it together in a pseudoreg directly,
4164 but for now that's not worth bothering with. */
4165
4166 if (entry_parm)
4167 {
4168 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4169 passed_type, named_arg);
4170
4171 if (nregs > 0)
4172 {
4173 current_function_pretend_args_size
4174 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4175 / (PARM_BOUNDARY / BITS_PER_UNIT)
4176 * (PARM_BOUNDARY / BITS_PER_UNIT));
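	      /* For illustration (values are target-dependent): with
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits (8 bytes),
		 nregs == 3 gives (12 + 8 - 1) / 8 * 8 == 16, i.e. the
		 register part rounded up to a whole number of parm slots.  */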
4177
4178 if (! second_time)
4179 {
4180 /* Handle calls that pass values in multiple non-contiguous
4181 locations. The Irix 6 ABI has examples of this. */
4182 if (GET_CODE (entry_parm) == PARALLEL)
4183 emit_group_store (validize_mem (stack_parm), entry_parm,
4184 int_size_in_bytes (TREE_TYPE (parm)),
4185 (TYPE_ALIGN (TREE_TYPE (parm))
4186 / BITS_PER_UNIT));
4187 else
4188 move_block_from_reg (REGNO (entry_parm),
4189 validize_mem (stack_parm), nregs,
4190 int_size_in_bytes (TREE_TYPE (parm)));
4191 }
4192 entry_parm = stack_parm;
4193 }
4194 }
4195 #endif
4196
4197 /* If we didn't decide this parm came in a register,
4198 by default it came on the stack. */
4199 if (entry_parm == 0)
4200 entry_parm = stack_parm;
4201
4202 /* Record permanently how this parm was passed. */
4203 if (! second_time)
4204 DECL_INCOMING_RTL (parm) = entry_parm;
4205
4206 /* If there is actually space on the stack for this parm,
4207 count it in stack_args_size; otherwise set stack_parm to 0
4208 to indicate there is no preallocated stack slot for the parm. */
4209
4210 if (entry_parm == stack_parm
4211 || (GET_CODE (entry_parm) == PARALLEL
4212 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4213 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4214 /* On some machines, even if a parm value arrives in a register
4215 there is still an (uninitialized) stack slot allocated for it.
4216
4217 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4218 whether this parameter already has a stack slot allocated,
4219 because an arg block exists only if current_function_args_size
4220 is larger than some threshold, and we haven't calculated that
4221 yet. So, for now, we just assume that stack slots never exist
4222 in this case. */
4223 || REG_PARM_STACK_SPACE (fndecl) > 0
4224 #endif
4225 )
4226 {
4227 stack_args_size.constant += arg_size.constant;
4228 if (arg_size.var)
4229 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4230 }
4231 else
4232 /* No stack slot was pushed for this parm. */
4233 stack_parm = 0;
4234
4235 /* Update info on where next arg arrives in registers. */
4236
4237 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4238 passed_type, named_arg);
4239
4240 /* If this is our second time through, we are done with this parm. */
4241 if (second_time)
4242 continue;
4243
4244 /* If we can't trust the parm stack slot to be aligned enough
4245 for its ultimate type, don't use that slot after entry.
4246 We'll make another stack slot, if we need one. */
4247 {
4248 int thisparm_boundary
4249 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4250
4251 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4252 stack_parm = 0;
4253 }
4254
4255 /* If parm was passed in memory, and we need to convert it on entry,
4256 don't store it back in that same slot. */
4257 if (entry_parm != 0
4258 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4259 stack_parm = 0;
4260
4261 #if 0
4262 /* Now adjust STACK_PARM to the mode and precise location
4263 where this parameter should live during execution,
4264 if we discover that it must live in the stack during execution.
4265 To make debuggers happier on big-endian machines, we store
4266 the value in the last bytes of the space available. */
4267
4268 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4269 && stack_parm != 0)
4270 {
4271 rtx offset_rtx;
4272
4273 if (BYTES_BIG_ENDIAN
4274 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4275 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4276 - GET_MODE_SIZE (nominal_mode));
4277
4278 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4279 if (offset_rtx == const0_rtx)
4280 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4281 else
4282 stack_parm = gen_rtx_MEM (nominal_mode,
4283 gen_rtx_PLUS (Pmode,
4284 internal_arg_pointer,
4285 offset_rtx));
4286
4287 /* If this is a memory ref that contains aggregate components,
4288 mark it as such for cse and loop optimize. */
4289 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4290 }
4291 #endif /* 0 */
4292
4293 #ifdef STACK_REGS
4294 /* We need this "use" info, because the gcc-register->stack-register
4295 converter in reg-stack.c needs to know which registers are active
4296 at the start of the function call. The actual parameter loading
4297 instructions are not always available by then, since they might
4298 have been optimised away. */
4299
4300 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4301 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4302 #endif
4303
4304 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4305 in the mode in which it arrives.
4306 STACK_PARM is an RTX for a stack slot where the parameter can live
4307 during the function (in case we want to put it there).
4308 STACK_PARM is 0 if no stack slot was pushed for it.
4309
4310 Now output code if necessary to convert ENTRY_PARM to
4311 the type in which this function declares it,
4312 and store that result in an appropriate place,
4313 which may be a pseudo reg, may be STACK_PARM,
4314 or may be a local stack slot if STACK_PARM is 0.
4315
4316 Set DECL_RTL to that place. */
4317
4318 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4319 {
4320 /* If a BLKmode arrives in registers, copy it to a stack slot.
4321 Handle calls that pass values in multiple non-contiguous
4322 locations. The Irix 6 ABI has examples of this. */
4323 if (GET_CODE (entry_parm) == REG
4324 || GET_CODE (entry_parm) == PARALLEL)
4325 {
4326 int size_stored
4327 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4328 UNITS_PER_WORD);
4329
4330 /* Note that we will be storing an integral number of words.
4331 So we have to be careful to ensure that we allocate an
4332 integral number of words. We do this below in the
4333 assign_stack_local if space was not allocated in the argument
4334 list. If it was, this will not work if PARM_BOUNDARY is not
4335 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4336 if it becomes a problem. */
4337
4338 if (stack_parm == 0)
4339 {
4340 stack_parm
4341 = assign_stack_local (GET_MODE (entry_parm),
4342 size_stored, 0);
4343
4344 /* If this is a memory ref that contains aggregate
4345 components, mark it as such for cse and loop optimize. */
4346 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4347 }
4348
4349 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4350 abort ();
4351
4352 if (TREE_READONLY (parm))
4353 RTX_UNCHANGING_P (stack_parm) = 1;
4354
4355 /* Handle calls that pass values in multiple non-contiguous
4356 locations. The Irix 6 ABI has examples of this. */
4357 if (GET_CODE (entry_parm) == PARALLEL)
4358 emit_group_store (validize_mem (stack_parm), entry_parm,
4359 int_size_in_bytes (TREE_TYPE (parm)),
4360 (TYPE_ALIGN (TREE_TYPE (parm))
4361 / BITS_PER_UNIT));
4362 else
4363 move_block_from_reg (REGNO (entry_parm),
4364 validize_mem (stack_parm),
4365 size_stored / UNITS_PER_WORD,
4366 int_size_in_bytes (TREE_TYPE (parm)));
4367 }
4368 DECL_RTL (parm) = stack_parm;
4369 }
4370 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4371 && ! DECL_INLINE (fndecl))
4372 /* layout_decl may set this. */
4373 || TREE_ADDRESSABLE (parm)
4374 || TREE_SIDE_EFFECTS (parm)
4375 /* If -ffloat-store specified, don't put explicit
4376 float variables into registers. */
4377 || (flag_float_store
4378 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4379 /* Always assign pseudo to structure return or item passed
4380 by invisible reference. */
4381 || passed_pointer || parm == function_result_decl)
4382 {
4383 /* Store the parm in a pseudoregister during the function, but we
4384 may need to do it in a wider mode. */
4385
4386 register rtx parmreg;
4387 int regno, regnoi = 0, regnor = 0;
4388
4389 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4390
4391 promoted_nominal_mode
4392 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4393
4394 parmreg = gen_reg_rtx (promoted_nominal_mode);
4395 mark_user_reg (parmreg);
4396
4397 /* If this was an item that we received a pointer to, set DECL_RTL
4398 appropriately. */
4399 if (passed_pointer)
4400 {
4401 DECL_RTL (parm)
4402 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4403 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4404 }
4405 else
4406 DECL_RTL (parm) = parmreg;
4407
4408 /* Copy the value into the register. */
4409 if (nominal_mode != passed_mode
4410 || promoted_nominal_mode != promoted_mode)
4411 {
4412 int save_tree_used;
4413 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4414 mode, by the caller. We now have to convert it to
4415 NOMINAL_MODE, if different. However, PARMREG may be in
4416 a different mode than NOMINAL_MODE if it is being stored
4417 promoted.
4418
4419 If ENTRY_PARM is a hard register, it might be in a register
4420 not valid for operating in its mode (e.g., an odd-numbered
4421 register for a DFmode). In that case, moves are the only
4422 thing valid, so we can't do a convert from there. This
4423 occurs when the calling sequence allows such misaligned
4424 usages.
4425
4426 In addition, the conversion may involve a call, which could
4427 clobber parameters which haven't been copied to pseudo
4428 registers yet. Therefore, we must first copy the parm to
4429 a pseudo reg here, and save the conversion until after all
4430 parameters have been moved. */
4431
4432 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4433
4434 emit_move_insn (tempreg, validize_mem (entry_parm));
4435
4436 push_to_sequence (conversion_insns);
4437 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4438
4439 /* TREE_USED gets set erroneously during expand_assignment. */
4440 save_tree_used = TREE_USED (parm);
4441 expand_assignment (parm,
4442 make_tree (nominal_type, tempreg), 0, 0);
4443 TREE_USED (parm) = save_tree_used;
4444 conversion_insns = get_insns ();
4445 did_conversion = 1;
4446 end_sequence ();
4447 }
4448 else
4449 emit_move_insn (parmreg, validize_mem (entry_parm));
4450
4451 /* If we were passed a pointer but the actual value
4452 can safely live in a register, put it in one. */
4453 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4454 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4455 && ! DECL_INLINE (fndecl))
4456 /* layout_decl may set this. */
4457 || TREE_ADDRESSABLE (parm)
4458 || TREE_SIDE_EFFECTS (parm)
4459 /* If -ffloat-store specified, don't put explicit
4460 float variables into registers. */
4461 || (flag_float_store
4462 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4463 {
4464 /* We can't use nominal_mode, because it will have been set to
4465 Pmode above. We must use the actual mode of the parm. */
4466 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4467 mark_user_reg (parmreg);
4468 emit_move_insn (parmreg, DECL_RTL (parm));
4469 DECL_RTL (parm) = parmreg;
4470 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4471 now the parm. */
4472 stack_parm = 0;
4473 }
4474 #ifdef FUNCTION_ARG_CALLEE_COPIES
4475 /* If we are passed an arg by reference and it is our responsibility
4476 to make a copy, do it now.
4477 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4478 original argument, so we must recreate them in the call to
4479 FUNCTION_ARG_CALLEE_COPIES. */
4480 /* ??? Later, add code to handle the case where the argument isn't
4481 modified, so the copy can be skipped. */
4482
4483 else if (passed_pointer
4484 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4485 TYPE_MODE (DECL_ARG_TYPE (parm)),
4486 DECL_ARG_TYPE (parm),
4487 named_arg)
4488 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4489 {
4490 rtx copy;
4491 tree type = DECL_ARG_TYPE (parm);
4492
4493 /* This sequence may involve a library call perhaps clobbering
4494 registers that haven't been copied to pseudos yet. */
4495
4496 push_to_sequence (conversion_insns);
4497
4498 if (TYPE_SIZE (type) == 0
4499 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4500 /* This is a variable-sized object. */
4501 copy = gen_rtx_MEM (BLKmode,
4502 allocate_dynamic_stack_space
4503 (expr_size (parm), NULL_RTX,
4504 TYPE_ALIGN (type)));
4505 else
4506 copy = assign_stack_temp (TYPE_MODE (type),
4507 int_size_in_bytes (type), 1);
4508 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4509 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4510
4511 store_expr (parm, copy, 0);
4512 emit_move_insn (parmreg, XEXP (copy, 0));
4513 if (current_function_check_memory_usage)
4514 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4515 XEXP (copy, 0), ptr_mode,
4516 GEN_INT (int_size_in_bytes (type)),
4517 TYPE_MODE (sizetype),
4518 GEN_INT (MEMORY_USE_RW),
4519 TYPE_MODE (integer_type_node));
4520 conversion_insns = get_insns ();
4521 did_conversion = 1;
4522 end_sequence ();
4523 }
4524 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4525
4526 /* In any case, record the parm's desired stack location
4527 in case we later discover it must live in the stack.
4528
4529 If it is a COMPLEX value, store the stack location for both
4530 halves. */
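      /* For illustration: a complex double parm might live in
	   (concat:DC (reg:DF 101) (reg:DF 102))
	 in which case REGNOR and REGNOI record the stack slots of the
	 real and imaginary halves separately.  */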
4531
4532 if (GET_CODE (parmreg) == CONCAT)
4533 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4534 else
4535 regno = REGNO (parmreg);
4536
4537 if (regno >= max_parm_reg)
4538 {
4539 rtx *new;
4540 int old_max_parm_reg = max_parm_reg;
4541
4542 /* It's slow to expand this one register at a time,
4543 but it's also rare and we need max_parm_reg to be
4544 precisely correct. */
4545 max_parm_reg = regno + 1;
4546 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4547 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4548 old_max_parm_reg * sizeof (rtx));
4549 bzero ((char *) (new + old_max_parm_reg),
4550 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4551 parm_reg_stack_loc = new;
4552 }
4553
4554 if (GET_CODE (parmreg) == CONCAT)
4555 {
4556 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4557
4558 regnor = REGNO (gen_realpart (submode, parmreg));
4559 regnoi = REGNO (gen_imagpart (submode, parmreg));
4560
4561 if (stack_parm != 0)
4562 {
4563 parm_reg_stack_loc[regnor]
4564 = gen_realpart (submode, stack_parm);
4565 parm_reg_stack_loc[regnoi]
4566 = gen_imagpart (submode, stack_parm);
4567 }
4568 else
4569 {
4570 parm_reg_stack_loc[regnor] = 0;
4571 parm_reg_stack_loc[regnoi] = 0;
4572 }
4573 }
4574 else
4575 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4576
4577 /* Mark the register as eliminable if we did no conversion
4578 and it was copied from memory at a fixed offset,
4579 and the arg pointer was not copied to a pseudo-reg.
4580 If the arg pointer is a pseudo reg or the offset formed
4581 an invalid address, such memory-equivalences
4582 as we make here would screw up life analysis for it. */
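      /* For illustration (schematic): the note added below has the shape
	   (expr_list:REG_EQUIV (mem (plus (reg virtual-incoming-args)
					   (const_int 8)))
		      ...)
	 attached to the insn that loads the parm's register.  */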
4583 if (nominal_mode == passed_mode
4584 && ! did_conversion
4585 && stack_parm != 0
4586 && GET_CODE (stack_parm) == MEM
4587 && stack_offset.var == 0
4588 && reg_mentioned_p (virtual_incoming_args_rtx,
4589 XEXP (stack_parm, 0)))
4590 {
4591 rtx linsn = get_last_insn ();
4592 rtx sinsn, set;
4593
4594 /* Mark complex types separately. */
4595 if (GET_CODE (parmreg) == CONCAT)
4596 /* Scan backwards for the set of the real and
4597 imaginary parts. */
4598 for (sinsn = linsn; sinsn != 0;
4599 sinsn = prev_nonnote_insn (sinsn))
4600 {
4601 set = single_set (sinsn);
4602 if (set != 0
4603 && SET_DEST (set) == regno_reg_rtx [regnoi])
4604 REG_NOTES (sinsn)
4605 = gen_rtx_EXPR_LIST (REG_EQUIV,
4606 parm_reg_stack_loc[regnoi],
4607 REG_NOTES (sinsn));
4608 else if (set != 0
4609 && SET_DEST (set) == regno_reg_rtx [regnor])
4610 REG_NOTES (sinsn)
4611 = gen_rtx_EXPR_LIST (REG_EQUIV,
4612 parm_reg_stack_loc[regnor],
4613 REG_NOTES (sinsn));
4614 }
4615 else if ((set = single_set (linsn)) != 0
4616 && SET_DEST (set) == parmreg)
4617 REG_NOTES (linsn)
4618 = gen_rtx_EXPR_LIST (REG_EQUIV,
4619 stack_parm, REG_NOTES (linsn));
4620 }
4621
4622 /* For pointer data type, suggest pointer register. */
4623 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4624 mark_reg_pointer (parmreg,
4625 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4626 / BITS_PER_UNIT));
4627 }
4628 else
4629 {
4630 /* Value must be stored in the stack slot STACK_PARM
4631 during function execution. */
4632
4633 if (promoted_mode != nominal_mode)
4634 {
4635 /* Conversion is required. */
4636 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4637
4638 emit_move_insn (tempreg, validize_mem (entry_parm));
4639
4640 push_to_sequence (conversion_insns);
4641 entry_parm = convert_to_mode (nominal_mode, tempreg,
4642 TREE_UNSIGNED (TREE_TYPE (parm)));
4643 if (stack_parm)
4644 {
4645 /* ??? This may need a big-endian conversion on sparc64. */
4646 stack_parm = change_address (stack_parm, nominal_mode,
4647 NULL_RTX);
4648 }
4649 conversion_insns = get_insns ();
4650 did_conversion = 1;
4651 end_sequence ();
4652 }
4653
4654 if (entry_parm != stack_parm)
4655 {
4656 if (stack_parm == 0)
4657 {
4658 stack_parm
4659 = assign_stack_local (GET_MODE (entry_parm),
4660 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4661 /* If this is a memory ref that contains aggregate components,
4662 mark it as such for cse and loop optimize. */
4663 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4664 }
4665
4666 if (promoted_mode != nominal_mode)
4667 {
4668 push_to_sequence (conversion_insns);
4669 emit_move_insn (validize_mem (stack_parm),
4670 validize_mem (entry_parm));
4671 conversion_insns = get_insns ();
4672 end_sequence ();
4673 }
4674 else
4675 emit_move_insn (validize_mem (stack_parm),
4676 validize_mem (entry_parm));
4677 }
4678 if (current_function_check_memory_usage)
4679 {
4680 push_to_sequence (conversion_insns);
4681 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4682 XEXP (stack_parm, 0), ptr_mode,
4683 GEN_INT (GET_MODE_SIZE (GET_MODE
4684 (entry_parm))),
4685 TYPE_MODE (sizetype),
4686 GEN_INT (MEMORY_USE_RW),
4687 TYPE_MODE (integer_type_node));
4688
4689 conversion_insns = get_insns ();
4690 end_sequence ();
4691 }
4692 DECL_RTL (parm) = stack_parm;
4693 }
4694
4695 /* If this "parameter" was the place where we are receiving the
4696 function's incoming structure pointer, set up the result. */
4697 if (parm == function_result_decl)
4698 {
4699 tree result = DECL_RESULT (fndecl);
4700 tree restype = TREE_TYPE (result);
4701
4702 DECL_RTL (result)
4703 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4704
4705 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4706 AGGREGATE_TYPE_P (restype));
4707 }
4708
4709 if (TREE_THIS_VOLATILE (parm))
4710 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4711 if (TREE_READONLY (parm))
4712 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4713 }
4714
4715 /* Output all parameter conversion instructions (possibly including calls)
4716 now that all parameters have been copied out of hard registers. */
4717 emit_insns (conversion_insns);
4718
4719 last_parm_insn = get_last_insn ();
4720
4721 current_function_args_size = stack_args_size.constant;
4722
4723 /* Adjust function incoming argument size for alignment and
4724 minimum length. */
4725
4726 #ifdef REG_PARM_STACK_SPACE
4727 #ifndef MAYBE_REG_PARM_STACK_SPACE
4728 current_function_args_size = MAX (current_function_args_size,
4729 REG_PARM_STACK_SPACE (fndecl));
4730 #endif
4731 #endif
4732
4733 #ifdef PREFERRED_STACK_BOUNDARY
4734 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
4735
4736 current_function_args_size
4737 = ((current_function_args_size + STACK_BYTES - 1)
4738 / STACK_BYTES) * STACK_BYTES;
4739 #endif
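/* For illustration (the boundary is target-dependent): with
   PREFERRED_STACK_BOUNDARY == 64, STACK_BYTES is 8, so an args size of 13
   rounds up to (13 + 8 - 1) / 8 * 8 == 16.  */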
4740
4741 #ifdef ARGS_GROW_DOWNWARD
4742 current_function_arg_offset_rtx
4743 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4744 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4745 size_int (-stack_args_size.constant)),
4746 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4747 #else
4748 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4749 #endif
4750
4751 /* See how many bytes, if any, of its args a function should try to pop
4752 on return. */
4753
4754 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4755 current_function_args_size);
4756
4757 /* For a stdarg.h function, save info about
4758 regs and stack space used by the named args. */
4759
4760 if (!hide_last_arg)
4761 current_function_args_info = args_so_far;
4762
4763 /* Set the rtx used for the function return value. Put this in its
4764 own variable so any optimizers that need this information don't have
4765 to include tree.h. Do this here so it gets done when an inlined
4766 function gets output. */
4767
4768 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4769 }
4770 \f
4771 /* Indicate whether REGNO is an incoming argument to the current function
4772 that was promoted to a wider mode. If so, return the RTX for the
4773 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4774 that REGNO is promoted from and whether the promotion was signed or
4775 unsigned. */
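/* For illustration (target-dependent): on a machine whose
   PROMOTE_FUNCTION_ARGS widens sub-word integers, a signed QImode
   argument arriving widened in a word register would be reported here
   with *PMODE == QImode and *PUNSIGNEDP == 0.  */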
4776
4777 #ifdef PROMOTE_FUNCTION_ARGS
4778
4779 rtx
4780 promoted_input_arg (regno, pmode, punsignedp)
4781 int regno;
4782 enum machine_mode *pmode;
4783 int *punsignedp;
4784 {
4785 tree arg;
4786
4787 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4788 arg = TREE_CHAIN (arg))
4789 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4790 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4791 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4792 {
4793 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4794 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4795
4796 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4797 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4798 && mode != DECL_MODE (arg))
4799 {
4800 *pmode = DECL_MODE (arg);
4801 *punsignedp = unsignedp;
4802 return DECL_INCOMING_RTL (arg);
4803 }
4804 }
4805
4806 return 0;
4807 }
4808
4809 #endif
4810 \f
4811 /* Compute the size and offset from the start of the stacked arguments for a
4812 parm passed in mode PASSED_MODE and with type TYPE.
4813
4814 INITIAL_OFFSET_PTR points to the current offset into the stacked
4815 arguments.
4816
4817 The starting offset and size for this parm are returned in *OFFSET_PTR
4818 and *ARG_SIZE_PTR, respectively.
4819
4820 IN_REGS is non-zero if the argument will be passed in registers. It will
4821 never be set if REG_PARM_STACK_SPACE is not defined.
4822
4823 FNDECL is the function in which the argument was defined.
4824
4825 There are two types of rounding that are done. The first, controlled by
4826 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4827 list to be aligned to the specific boundary (in bits). This rounding
4828 affects the initial and starting offsets, but not the argument size.
4829
4830 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4831 optionally rounds the size of the parm to PARM_BOUNDARY. The
4832 initial offset is not affected by this rounding, while the size always
4833 is and the starting offset may be. */
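/* A worked example, for illustration only (args grow upward; all numbers
   hypothetical): with FUNCTION_ARG_BOUNDARY == 32 bits, an initial offset
   of 6 is first rounded up to 8, which becomes the starting offset; a
   parm of size 10 with PARM_BOUNDARY == 32 and upward padding then has
   its size rounded up to 12, so the next parm starts at offset 20.  */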
4834
4835 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4836 initial_offset_ptr is positive because locate_and_pad_parm's
4837 callers pass in the total size of args so far as
4838 initial_offset_ptr. arg_size_ptr is always positive. */
4839
4840 void
4841 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4842 initial_offset_ptr, offset_ptr, arg_size_ptr)
4843 enum machine_mode passed_mode;
4844 tree type;
4845 int in_regs;
4846 tree fndecl;
4847 struct args_size *initial_offset_ptr;
4848 struct args_size *offset_ptr;
4849 struct args_size *arg_size_ptr;
4850 {
4851 tree sizetree
4852 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4853 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4854 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4855
4856 #ifdef REG_PARM_STACK_SPACE
4857 /* If we have found a stack parm before we reach the end of the
4858 area reserved for registers, skip that area. */
4859 if (! in_regs)
4860 {
4861 int reg_parm_stack_space = 0;
4862
4863 #ifdef MAYBE_REG_PARM_STACK_SPACE
4864 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4865 #else
4866 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4867 #endif
4868 if (reg_parm_stack_space > 0)
4869 {
4870 if (initial_offset_ptr->var)
4871 {
4872 initial_offset_ptr->var
4873 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4874 size_int (reg_parm_stack_space));
4875 initial_offset_ptr->constant = 0;
4876 }
4877 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4878 initial_offset_ptr->constant = reg_parm_stack_space;
4879 }
4880 }
4881 #endif /* REG_PARM_STACK_SPACE */
4882
4883 arg_size_ptr->var = 0;
4884 arg_size_ptr->constant = 0;
4885
4886 #ifdef ARGS_GROW_DOWNWARD
4887 if (initial_offset_ptr->var)
4888 {
4889 offset_ptr->constant = 0;
4890 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4891 initial_offset_ptr->var);
4892 }
4893 else
4894 {
4895 offset_ptr->constant = - initial_offset_ptr->constant;
4896 offset_ptr->var = 0;
4897 }
4898 if (where_pad != none
4899 && (TREE_CODE (sizetree) != INTEGER_CST
4900 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4901 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4902 SUB_PARM_SIZE (*offset_ptr, sizetree);
4903 if (where_pad != downward)
4904 pad_to_arg_alignment (offset_ptr, boundary);
4905 if (initial_offset_ptr->var)
4906 {
4907 arg_size_ptr->var = size_binop (MINUS_EXPR,
4908 size_binop (MINUS_EXPR,
4909 integer_zero_node,
4910 initial_offset_ptr->var),
4911 offset_ptr->var);
4912 }
4913 else
4914 {
4915 arg_size_ptr->constant = (- initial_offset_ptr->constant
4916 - offset_ptr->constant);
4917 }
4918 #else /* !ARGS_GROW_DOWNWARD */
4919 pad_to_arg_alignment (initial_offset_ptr, boundary);
4920 *offset_ptr = *initial_offset_ptr;
4921
4922 #ifdef PUSH_ROUNDING
4923 if (passed_mode != BLKmode)
4924 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4925 #endif
4926
4927 /* pad_below needs the pre-rounded size to know how much to pad below,
4928 so this must be done before rounding up. */
4929 if (where_pad == downward
4930 /* However, BLKmode args passed in regs have their padding done elsewhere.
4931 The stack slot must be able to hold the entire register. */
4932 && !(in_regs && passed_mode == BLKmode))
4933 pad_below (offset_ptr, passed_mode, sizetree);
4934
4935 if (where_pad != none
4936 && (TREE_CODE (sizetree) != INTEGER_CST
4937 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4938 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4939
4940 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4941 #endif /* ARGS_GROW_DOWNWARD */
4942 }
4943
4944 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4945 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
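/* For illustration: with BOUNDARY == 64 bits (8 bytes), a constant offset
   of 20 becomes CEIL_ROUND (20, 8) == 24, or FLOOR_ROUND (20, 8) == 16
   when ARGS_GROW_DOWNWARD.  */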
4946
4947 static void
4948 pad_to_arg_alignment (offset_ptr, boundary)
4949 struct args_size *offset_ptr;
4950 int boundary;
4951 {
4952 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4953
4954 if (boundary > BITS_PER_UNIT)
4955 {
4956 if (offset_ptr->var)
4957 {
4958 offset_ptr->var =
4959 #ifdef ARGS_GROW_DOWNWARD
4960 round_down
4961 #else
4962 round_up
4963 #endif
4964 (ARGS_SIZE_TREE (*offset_ptr),
4965 boundary / BITS_PER_UNIT);
4966 offset_ptr->constant = 0; /*?*/
4967 }
4968 else
4969 offset_ptr->constant =
4970 #ifdef ARGS_GROW_DOWNWARD
4971 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4972 #else
4973 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4974 #endif
4975 }
4976 }
4977
4978 #ifndef ARGS_GROW_DOWNWARD
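/* Adjust *OFFSET_PTR so that a value shorter than its padded slot sits at
   the slot's far end, i.e. the padding goes below the value.  For
   illustration: a HImode parm (2 bytes) with PARM_BOUNDARY == 32 bits
   has its offset advanced by 2 bytes.  */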
4979 static void
4980 pad_below (offset_ptr, passed_mode, sizetree)
4981 struct args_size *offset_ptr;
4982 enum machine_mode passed_mode;
4983 tree sizetree;
4984 {
4985 if (passed_mode != BLKmode)
4986 {
4987 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4988 offset_ptr->constant
4989 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4990 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4991 - GET_MODE_SIZE (passed_mode));
4992 }
4993 else
4994 {
4995 if (TREE_CODE (sizetree) != INTEGER_CST
4996 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4997 {
4998 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4999 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5000 /* Add it in. */
5001 ADD_PARM_SIZE (*offset_ptr, s2);
5002 SUB_PARM_SIZE (*offset_ptr, sizetree);
5003 }
5004 }
5005 }
5006 #endif
5007
5008 #ifdef ARGS_GROW_DOWNWARD
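/* Round VALUE down to the nearest multiple of DIVISOR, as a tree.
   For illustration: a VALUE of 13 with DIVISOR 8 yields 8.  */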
5009 static tree
5010 round_down (value, divisor)
5011 tree value;
5012 int divisor;
5013 {
5014 return size_binop (MULT_EXPR,
5015 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5016 size_int (divisor));
5017 }
5018 #endif
5019 \f
5020 /* Walk the tree of blocks describing the binding levels within a function
5021 and warn about uninitialized variables.
5022 This is done after calling flow_analysis and before global_alloc
5023 clobbers the pseudo-regs to hard regs. */
5024
5025 void
5026 uninitialized_vars_warning (block)
5027 tree block;
5028 {
5029 register tree decl, sub;
5030 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5031 {
5032 if (TREE_CODE (decl) == VAR_DECL
5033 /* These warnings are unreliable for aggregates
5034 because assigning the fields one by one can fail to convince
5035 flow.c that the entire aggregate was initialized.
5036 Unions are troublesome because members may be shorter. */
5037 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5038 && DECL_RTL (decl) != 0
5039 && GET_CODE (DECL_RTL (decl)) == REG
5040 /* Global optimizations can make it difficult to determine if a
5041 particular variable has been initialized. However, a VAR_DECL
5042 with a nonzero DECL_INITIAL had an initializer, so do not
5043 claim it is potentially uninitialized.
5044
5045 We do not care about the actual value in DECL_INITIAL, so we do
5046 not worry that it may be a dangling pointer. */
5047 && DECL_INITIAL (decl) == NULL_TREE
5048 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5049 warning_with_decl (decl,
5050 "`%s' might be used uninitialized in this function");
5051 if (TREE_CODE (decl) == VAR_DECL
5052 && DECL_RTL (decl) != 0
5053 && GET_CODE (DECL_RTL (decl)) == REG
5054 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5055 warning_with_decl (decl,
5056 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5057 }
5058 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5059 uninitialized_vars_warning (sub);
5060 }
5061
5062 /* Do the appropriate part of uninitialized_vars_warning
5063 but for arguments instead of local variables. */
5064
5065 void
5066 setjmp_args_warning ()
5067 {
5068 register tree decl;
5069 for (decl = DECL_ARGUMENTS (current_function_decl);
5070 decl; decl = TREE_CHAIN (decl))
5071 if (DECL_RTL (decl) != 0
5072 && GET_CODE (DECL_RTL (decl)) == REG
5073 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5074 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5075 }
5076
5077 /* If this function calls setjmp, put all vars into the stack
5078 unless they were declared `register'. */
5079
5080 void
5081 setjmp_protect (block)
5082 tree block;
5083 {
5084 register tree decl, sub;
5085 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5086 if ((TREE_CODE (decl) == VAR_DECL
5087 || TREE_CODE (decl) == PARM_DECL)
5088 && DECL_RTL (decl) != 0
5089 && (GET_CODE (DECL_RTL (decl)) == REG
5090 || (GET_CODE (DECL_RTL (decl)) == MEM
5091 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5092 /* If this variable came from an inline function, it must be
5093 that its life doesn't overlap the setjmp. If there was a
5094 setjmp in the function, it would already be in memory. We
5095 must exclude such variables because their DECL_RTL might be
5096 set to strange things such as virtual_stack_vars_rtx. */
5097 && ! DECL_FROM_INLINE (decl)
5098 && (
5099 #ifdef NON_SAVING_SETJMP
5100 /* If longjmp doesn't restore the registers,
5101 don't put anything in them. */
5102 NON_SAVING_SETJMP
5103 ||
5104 #endif
5105 ! DECL_REGISTER (decl)))
5106 put_var_into_stack (decl);
5107 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5108 setjmp_protect (sub);
5109 }
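
/* A minimal sketch (not from the original file) of why variables must
   live on the stack across setjmp: `i' is modified after the setjmp,
   so if it were register-allocated, longjmp would restore the value
   the register held when setjmp was called.  */
#if 0
#include <setjmp.h>
static jmp_buf env;

static int
setjmp_example (void)
{
  int i = 0;                    /* would be put_var_into_stack'ed */
  if (setjmp (env) == 0)
    {
      i = 1;
      longjmp (env, 1);         /* control reappears at the setjmp */
    }
  return i;                     /* must see i == 1, not a stale 0 */
}
#endif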
5110 \f
5111 /* Like the previous function, but for args instead of local variables. */
5112
5113 void
5114 setjmp_protect_args ()
5115 {
5116 register tree decl;
5117 for (decl = DECL_ARGUMENTS (current_function_decl);
5118 decl; decl = TREE_CHAIN (decl))
5119 if ((TREE_CODE (decl) == VAR_DECL
5120 || TREE_CODE (decl) == PARM_DECL)
5121 && DECL_RTL (decl) != 0
5122 && (GET_CODE (DECL_RTL (decl)) == REG
5123 || (GET_CODE (DECL_RTL (decl)) == MEM
5124 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5125 && (
5126 /* If longjmp doesn't restore the registers,
5127 don't put anything in them. */
5128 #ifdef NON_SAVING_SETJMP
5129 NON_SAVING_SETJMP
5130 ||
5131 #endif
5132 ! DECL_REGISTER (decl)))
5133 put_var_into_stack (decl);
5134 }
5135 \f
5136 /* Return the context-pointer register corresponding to DECL,
5137 or 0 if it does not need one. */
5138
5139 rtx
5140 lookup_static_chain (decl)
5141 tree decl;
5142 {
5143 tree context = decl_function_context (decl);
5144 tree link;
5145
5146 if (context == 0
5147 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5148 return 0;
5149
5150 /* We treat inline_function_decl as an alias for the current function
5151 because that is the inline function whose vars, types, etc.
5152 are being merged into the current function.
5153 See expand_inline_function. */
5154 if (context == current_function_decl || context == inline_function_decl)
5155 return virtual_stack_vars_rtx;
5156
5157 for (link = context_display; link; link = TREE_CHAIN (link))
5158 if (TREE_PURPOSE (link) == context)
5159 return RTL_EXPR_RTL (TREE_VALUE (link));
5160
5161 abort ();
5162 }
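
/* A minimal sketch (not from the original file): the static chain
   exists for GNU C nested functions that refer to locals of their
   containing function, as in:  */
#if 0
static int
outer (int n)
{
  int acc = 0;
  void add (int k) { acc += k; }  /* `acc' is reached via the chain */
  add (n);
  return acc;
}
#endif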
5163 \f
5164 /* Convert a stack slot address ADDR for variable VAR
5165 (from a containing function)
5166 into an address valid in this function (using a static chain). */
5167
5168 rtx
5169 fix_lexical_addr (addr, var)
5170 rtx addr;
5171 tree var;
5172 {
5173 rtx basereg;
5174 HOST_WIDE_INT displacement;
5175 tree context = decl_function_context (var);
5176 struct function *fp;
5177 rtx base = 0;
5178
5179 /* If this is the present function, we need not do anything. */
5180 if (context == current_function_decl || context == inline_function_decl)
5181 return addr;
5182
5183 for (fp = outer_function_chain; fp; fp = fp->next)
5184 if (fp->decl == context)
5185 break;
5186
5187 if (fp == 0)
5188 abort ();
5189
5190 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5191 addr = XEXP (XEXP (addr, 0), 0);
5192
5193 /* Decode given address as base reg plus displacement. */
5194 if (GET_CODE (addr) == REG)
5195 basereg = addr, displacement = 0;
5196 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5197 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5198 else
5199 abort ();
5200
5201 /* We accept vars reached via the containing function's
5202 incoming arg pointer and via its stack variables pointer. */
5203 if (basereg == fp->internal_arg_pointer)
5204 {
5205 /* If reached via arg pointer, get the arg pointer value
5206 out of that function's stack frame.
5207
5208 There are two cases: If a separate ap is needed, allocate a
5209 slot in the outer function for it and dereference it that way.
5210 This is correct even if the real ap is actually a pseudo.
5211 Otherwise, just adjust the offset from the frame pointer to
5212 compensate. */
5213
5214 #ifdef NEED_SEPARATE_AP
5215 rtx addr;
5216
5217 if (fp->arg_pointer_save_area == 0)
5218 fp->arg_pointer_save_area
5219 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5220
5221 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5222 addr = memory_address (Pmode, addr);
5223
5224 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5225 #else
5226 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5227 base = lookup_static_chain (var);
5228 #endif
5229 }
5230
5231 else if (basereg == virtual_stack_vars_rtx)
5232 {
5233 /* This is the same code as lookup_static_chain, duplicated here to
5234 avoid an extra call to decl_function_context. */
5235 tree link;
5236
5237 for (link = context_display; link; link = TREE_CHAIN (link))
5238 if (TREE_PURPOSE (link) == context)
5239 {
5240 base = RTL_EXPR_RTL (TREE_VALUE (link));
5241 break;
5242 }
5243 }
5244
5245 if (base == 0)
5246 abort ();
5247
5248 /* Use same offset, relative to appropriate static chain or argument
5249 pointer. */
5250 return plus_constant (base, displacement);
5251 }
5252 \f
5253 /* Return the address of the trampoline for entering nested fn FUNCTION.
5254 If necessary, allocate a trampoline (in the stack frame)
5255 and emit rtl to initialize its contents (at entry to this function). */
5256
5257 rtx
5258 trampoline_address (function)
5259 tree function;
5260 {
5261 tree link;
5262 tree rtlexp;
5263 rtx tramp;
5264 struct function *fp;
5265 tree fn_context;
5266
5267 /* Find an existing trampoline and return it. */
5268 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5269 if (TREE_PURPOSE (link) == function)
5270 return
5271 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5272
5273 for (fp = outer_function_chain; fp; fp = fp->next)
5274 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5275 if (TREE_PURPOSE (link) == function)
5276 {
5277 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5278 function);
5279 return round_trampoline_addr (tramp);
5280 }
5281
5282 /* None exists; we must make one. */
5283
5284 /* Find the `struct function' for the function containing FUNCTION. */
5285 fp = 0;
5286 fn_context = decl_function_context (function);
5287 if (fn_context != current_function_decl
5288 && fn_context != inline_function_decl)
5289 for (fp = outer_function_chain; fp; fp = fp->next)
5290 if (fp->decl == fn_context)
5291 break;
5292
5293 /* Allocate run-time space for this trampoline
5294 (usually in the defining function's stack frame). */
5295 #ifdef ALLOCATE_TRAMPOLINE
5296 tramp = ALLOCATE_TRAMPOLINE (fp);
5297 #else
5298 /* If rounding needed, allocate extra space
5299 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5300 #ifdef TRAMPOLINE_ALIGNMENT
5301 #define TRAMPOLINE_REAL_SIZE \
5302 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5303 #else
5304 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5305 #endif
5306 if (fp != 0)
5307 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5308 else
5309 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5310 #endif
5311
5312 /* Record the trampoline for reuse and note it for later initialization
5313 by expand_function_end. */
5314 if (fp != 0)
5315 {
5316 push_obstacks (fp->function_maybepermanent_obstack,
5317 fp->function_maybepermanent_obstack);
5318 rtlexp = make_node (RTL_EXPR);
5319 RTL_EXPR_RTL (rtlexp) = tramp;
5320 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5321 pop_obstacks ();
5322 }
5323 else
5324 {
5325 /* Make the RTL_EXPR node temporary, not momentary, so that the
5326 trampoline_list doesn't become garbage. */
5327 int momentary = suspend_momentary ();
5328 rtlexp = make_node (RTL_EXPR);
5329 resume_momentary (momentary);
5330
5331 RTL_EXPR_RTL (rtlexp) = tramp;
5332 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5333 }
5334
5335 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5336 return round_trampoline_addr (tramp);
5337 }
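
/* A minimal sketch (not from the original file): a trampoline is
   needed when a nested function's address escapes as an ordinary
   function pointer, since that pointer must somehow carry the static
   chain along with the code address:  */
#if 0
extern void apply (void (*fn) (int));

static void
trampoline_example (int bias)
{
  void add_bias (int x) { bias += x; }
  apply (add_bias);             /* decays to a trampoline address */
}
#endif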
5338
5339 /* Given a trampoline address,
5340 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5341
5342 static rtx
5343 round_trampoline_addr (tramp)
5344 rtx tramp;
5345 {
5346 #ifdef TRAMPOLINE_ALIGNMENT
5347 /* Round address up to desired boundary. */
5348 rtx temp = gen_reg_rtx (Pmode);
5349 temp = expand_binop (Pmode, add_optab, tramp,
5350 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5351 temp, 0, OPTAB_LIB_WIDEN);
5352 tramp = expand_binop (Pmode, and_optab, temp,
5353 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5354 temp, 0, OPTAB_LIB_WIDEN);
5355 #endif
5356 return tramp;
5357 }
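
/* A minimal sketch (not from the original file): the two expand_binop
   calls above emit the usual align-up idiom.  For a power-of-two byte
   alignment A, (addr + A - 1) & -A rounds addr up to the next multiple
   of A; e.g. with A == 16, address 0x1003 becomes 0x1010.  */
#if 0
static unsigned long
align_up_example (unsigned long addr, unsigned long a)
{
  return (addr + a - 1) & -a;   /* -a == ~(a - 1) when a is a power of two */
}
#endif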
5358 \f
5359 /* The functions identify_blocks and reorder_blocks provide a way to
5360 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5361 duplicate portions of the RTL code. Call identify_blocks before
5362 changing the RTL, and call reorder_blocks after. */
5363
5364 /* Put all this function's BLOCK nodes, including those that are chained
5365 onto the first block, into a vector, and return it.
5366 Also store in each NOTE for the beginning or end of a block
5367 the index of that block in the vector.
5368 The arguments are BLOCK, the chain of top-level blocks of the function,
5369 and INSNS, the insn chain of the function. */
5370
5371 tree *
5372 identify_blocks (block, insns)
5373 tree block;
5374 rtx insns;
5375 {
5376 int n_blocks;
5377 tree *block_vector;
5378 int *block_stack;
5379 int depth = 0;
5380 int next_block_number = 1;
5381 int current_block_number = 1;
5382 rtx insn;
5383
5384 if (block == 0)
5385 return 0;
5386
5387 n_blocks = all_blocks (block, 0);
5388 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5389 block_stack = (int *) alloca (n_blocks * sizeof (int));
5390
5391 all_blocks (block, block_vector);
5392
5393 for (insn = insns; insn; insn = NEXT_INSN (insn))
5394 if (GET_CODE (insn) == NOTE)
5395 {
5396 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5397 {
5398 block_stack[depth++] = current_block_number;
5399 current_block_number = next_block_number;
5400 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5401 }
5402 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5403 {
5404 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5405 current_block_number = block_stack[--depth];
5406 }
5407 }
5408
5409 if (n_blocks != next_block_number)
5410 abort ();
5411
5412 return block_vector;
5413 }
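
/* A minimal sketch (not from the original file): stripped of the RTL
   walk, the BEG/END numbering above is ordinary matched-delimiter
   scanning, with block_stack remembering the enclosing block:  */
#if 0
static void
number_blocks_example (const char *s)
{
  int stack[64], depth = 0, next = 1, cur = 1;  /* fixed depth for the sketch */

  for (; *s; s++)
    if (*s == '{')              /* NOTE_INSN_BLOCK_BEG gets next++ */
      {
        stack[depth++] = cur;
        cur = next++;
      }
    else if (*s == '}')         /* NOTE_INSN_BLOCK_END gets cur, */
      cur = stack[--depth];     /* then we pop the enclosing block */
}
#endif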
5414
5415 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5416 and a revised instruction chain, rebuild the tree structure
5417 of BLOCK nodes to correspond to the new order of RTL.
5418 The new block tree is inserted below TOP_BLOCK.
5419 Returns the current top-level block. */
5420
5421 tree
5422 reorder_blocks (block_vector, block, insns)
5423 tree *block_vector;
5424 tree block;
5425 rtx insns;
5426 {
5427 tree current_block = block;
5428 rtx insn;
5429
5430 if (block_vector == 0)
5431 return block;
5432
5433 /* Prune the old trees away, so that they don't get in the way. */
5434 BLOCK_SUBBLOCKS (current_block) = 0;
5435 BLOCK_CHAIN (current_block) = 0;
5436
5437 for (insn = insns; insn; insn = NEXT_INSN (insn))
5438 if (GET_CODE (insn) == NOTE)
5439 {
5440 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5441 {
5442 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5443 /* If we have seen this block before, copy it. */
5444 if (TREE_ASM_WRITTEN (block))
5445 block = copy_node (block);
5446 BLOCK_SUBBLOCKS (block) = 0;
5447 TREE_ASM_WRITTEN (block) = 1;
5448 BLOCK_SUPERCONTEXT (block) = current_block;
5449 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5450 BLOCK_SUBBLOCKS (current_block) = block;
5451 current_block = block;
5452 NOTE_SOURCE_FILE (insn) = 0;
5453 }
5454 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5455 {
5456 BLOCK_SUBBLOCKS (current_block)
5457 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5458 current_block = BLOCK_SUPERCONTEXT (current_block);
5459 NOTE_SOURCE_FILE (insn) = 0;
5460 }
5461 }
5462
5463 BLOCK_SUBBLOCKS (current_block)
5464 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5465 return current_block;
5466 }
5467
5468 /* Reverse the order of elements in the chain T of blocks,
5469 and return the new head of the chain (old last element). */
5470
5471 static tree
5472 blocks_nreverse (t)
5473 tree t;
5474 {
5475 register tree prev = 0, decl, next;
5476 for (decl = t; decl; decl = next)
5477 {
5478 next = BLOCK_CHAIN (decl);
5479 BLOCK_CHAIN (decl) = prev;
5480 prev = decl;
5481 }
5482 return prev;
5483 }
5484
5485 /* Count the subblocks of the list starting with BLOCK, and list them
5486 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5487 blocks. */
5488
5489 static int
5490 all_blocks (block, vector)
5491 tree block;
5492 tree *vector;
5493 {
5494 int n_blocks = 0;
5495
5496 while (block)
5497 {
5498 TREE_ASM_WRITTEN (block) = 0;
5499
5500 /* Record this block. */
5501 if (vector)
5502 vector[n_blocks] = block;
5503
5504 ++n_blocks;
5505
5506 /* Record the subblocks, and their subblocks... */
5507 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5508 vector ? vector + n_blocks : 0);
5509 block = BLOCK_CHAIN (block);
5510 }
5511
5512 return n_blocks;
5513 }
5514 \f
5515 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5516 and initialize static variables for generating RTL for the statements
5517 of the function. */
5518
5519 void
5520 init_function_start (subr, filename, line)
5521 tree subr;
5522 char *filename;
5523 int line;
5524 {
5525 init_stmt_for_function ();
5526
5527 cse_not_expected = ! optimize;
5528
5529 /* Caller save not needed yet. */
5530 caller_save_needed = 0;
5531
5532 /* No stack slots have been made yet. */
5533 stack_slot_list = 0;
5534
5535 /* There is no stack slot for handling nonlocal gotos. */
5536 nonlocal_goto_handler_slots = 0;
5537 nonlocal_goto_stack_level = 0;
5538
5539 /* No labels have been declared for nonlocal use. */
5540 nonlocal_labels = 0;
5541
5542 /* No function calls so far in this function. */
5543 function_call_count = 0;
5544
5545 /* No parm regs have been allocated.
5546 (This is important for output_inline_function.) */
5547 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5548
5549 /* Initialize the RTL mechanism. */
5550 init_emit ();
5551
5552 /* Initialize the queue of pending postincrements and postdecrements,
5553 and some other info in expr.c. */
5554 init_expr ();
5555
5556 /* We haven't done register allocation yet. */
5557 reg_renumber = 0;
5558
5559 init_const_rtx_hash_table ();
5560
5561 current_function_name = (*decl_printable_name) (subr, 2);
5562
5563 /* Nonzero if this is a nested function that uses a static chain. */
5564
5565 current_function_needs_context
5566 = (decl_function_context (current_function_decl) != 0
5567 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5568
5569 /* Set if a call to setjmp is seen. */
5570 current_function_calls_setjmp = 0;
5571
5572 /* Set if a call to longjmp is seen. */
5573 current_function_calls_longjmp = 0;
5574
5575 current_function_calls_alloca = 0;
5576 current_function_has_nonlocal_label = 0;
5577 current_function_has_nonlocal_goto = 0;
5578 current_function_contains_functions = 0;
5579 current_function_sp_is_unchanging = 0;
5580 current_function_addresses_labels = 0;
5581 current_function_is_thunk = 0;
5582
5583 current_function_returns_pcc_struct = 0;
5584 current_function_returns_struct = 0;
5585 current_function_epilogue_delay_list = 0;
5586 current_function_uses_const_pool = 0;
5587 current_function_uses_pic_offset_table = 0;
5588 current_function_cannot_inline = 0;
5589
5590 /* We have not yet needed to make a label to jump to for tail-recursion. */
5591 tail_recursion_label = 0;
5592
5593 /* We haven't had a need to make a save area for ap yet. */
5594
5595 arg_pointer_save_area = 0;
5596
5597 /* No stack slots allocated yet. */
5598 frame_offset = 0;
5599
5600 /* No SAVE_EXPRs in this function yet. */
5601 save_expr_regs = 0;
5602
5603 /* No RTL_EXPRs in this function yet. */
5604 rtl_expr_chain = 0;
5605
5606 /* Set up to allocate temporaries. */
5607 init_temp_slots ();
5608
5609 /* Within function body, compute a type's size as soon as it is laid out. */
5610 immediate_size_expand++;
5611
5612 /* We haven't made any trampolines for this function yet. */
5613 trampoline_list = 0;
5614
5615 init_pending_stack_adjust ();
5616 inhibit_defer_pop = 0;
5617
5618 current_function_outgoing_args_size = 0;
5619
5620 /* Prevent ever trying to delete the first instruction of a function.
5621 Also tell final how to output a linenum before the function prologue.
5622 Note linenums could be missing, e.g. when compiling a Java .class file. */
5623 if (line > 0)
5624 emit_line_note (filename, line);
5625
5626 /* Make sure first insn is a note even if we don't want linenums.
5627 This makes sure the first insn will never be deleted.
5628 Also, final expects a note to appear there. */
5629 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5630
5631 /* Set flags used by final.c. */
5632 if (aggregate_value_p (DECL_RESULT (subr)))
5633 {
5634 #ifdef PCC_STATIC_STRUCT_RETURN
5635 current_function_returns_pcc_struct = 1;
5636 #endif
5637 current_function_returns_struct = 1;
5638 }
5639
5640 /* Warn if this value is an aggregate type,
5641 regardless of which calling convention we are using for it. */
5642 if (warn_aggregate_return
5643 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5644 warning ("function returns an aggregate");
5645
5646 current_function_returns_pointer
5647 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5648
5649 /* Indicate that we need to distinguish between the return value of the
5650 present function and the return value of a function being called. */
5651 rtx_equal_function_value_matters = 1;
5652
5653 /* Indicate that we have not instantiated virtual registers yet. */
5654 virtuals_instantiated = 0;
5655
5656 /* Indicate we have no need of a frame pointer yet. */
5657 frame_pointer_needed = 0;
5658
5659 /* By default assume not varargs or stdarg. */
5660 current_function_varargs = 0;
5661 current_function_stdarg = 0;
5662 }
5663
5664 /* Indicate that the current function uses extra args
5665 not explicitly mentioned in the argument list in any fashion. */
5666
5667 void
5668 mark_varargs ()
5669 {
5670 current_function_varargs = 1;
5671 }
5672
5673 /* Expand a call to __main at the beginning of a possible main function. */
5674
5675 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5676 #undef HAS_INIT_SECTION
5677 #define HAS_INIT_SECTION
5678 #endif
5679
5680 void
5681 expand_main_function ()
5682 {
5683 #if !defined (HAS_INIT_SECTION)
5684 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5685 VOIDmode, 0);
5686 #endif /* not HAS_INIT_SECTION */
5687 }
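
/* A rough note (not from the original file): on targets without a
   usable .init section, the library call emitted above makes main
   behave roughly as if it had been written

        int main (void) { __main (); ... }

   where __main is the libgcc routine that runs global constructors
   before user code.  */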
5688 \f
5689 extern struct obstack permanent_obstack;
5690
5691 /* Start the RTL for a new function, and set variables used for
5692 emitting RTL.
5693 SUBR is the FUNCTION_DECL node.
5694 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5695 the function's parameters, which must be run at any return statement. */
5696
5697 void
5698 expand_function_start (subr, parms_have_cleanups)
5699 tree subr;
5700 int parms_have_cleanups;
5701 {
5702 register int i;
5703 tree tem;
5704 rtx last_ptr = NULL_RTX;
5705
5706 /* Make sure volatile mem refs aren't considered
5707 valid operands of arithmetic insns. */
5708 init_recog_no_volatile ();
5709
5710 /* Set this before generating any memory accesses. */
5711 current_function_check_memory_usage
5712 = (flag_check_memory_usage
5713 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5714
5715 current_function_instrument_entry_exit
5716 = (flag_instrument_function_entry_exit
5717 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5718
5719 /* If function gets a static chain arg, store it in the stack frame.
5720 Do this first, so it gets the first stack slot offset. */
5721 if (current_function_needs_context)
5722 {
5723 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5724
5725 /* Delay copying static chain if it is not a register to avoid
5726 conflicts with regs used for parameters. */
5727 if (! SMALL_REGISTER_CLASSES
5728 || GET_CODE (static_chain_incoming_rtx) == REG)
5729 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5730 }
5731
5732 /* If the parameters of this function need cleaning up, get a label
5733 for the beginning of the code which executes those cleanups. This must
5734 be done before doing anything with return_label. */
5735 if (parms_have_cleanups)
5736 cleanup_label = gen_label_rtx ();
5737 else
5738 cleanup_label = 0;
5739
5740 /* Make the label for return statements to jump to, if this machine
5741 does not have a one-instruction return and uses an epilogue,
5742 or if it returns a structure, or if it has parm cleanups. */
5743 #ifdef HAVE_return
5744 if (cleanup_label == 0 && HAVE_return
5745 && ! current_function_instrument_entry_exit
5746 && ! current_function_returns_pcc_struct
5747 && ! (current_function_returns_struct && ! optimize))
5748 return_label = 0;
5749 else
5750 return_label = gen_label_rtx ();
5751 #else
5752 return_label = gen_label_rtx ();
5753 #endif
5754
5755 /* Initialize rtx used to return the value. */
5756 /* Do this before assign_parms so that we copy the struct value address
5757 before any library calls that assign parms might generate. */
5758
5759 /* Decide whether to return the value in memory or in a register. */
5760 if (aggregate_value_p (DECL_RESULT (subr)))
5761 {
5762 /* Returning something that won't go in a register. */
5763 register rtx value_address = 0;
5764
5765 #ifdef PCC_STATIC_STRUCT_RETURN
5766 if (current_function_returns_pcc_struct)
5767 {
5768 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5769 value_address = assemble_static_space (size);
5770 }
5771 else
5772 #endif
5773 {
5774 /* Expect to be passed the address of a place to store the value.
5775 If it is passed as an argument, assign_parms will take care of
5776 it. */
5777 if (struct_value_incoming_rtx)
5778 {
5779 value_address = gen_reg_rtx (Pmode);
5780 emit_move_insn (value_address, struct_value_incoming_rtx);
5781 }
5782 }
5783 if (value_address)
5784 {
5785 DECL_RTL (DECL_RESULT (subr))
5786 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5787 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5788 AGGREGATE_TYPE_P (TREE_TYPE
5789 (DECL_RESULT
5790 (subr))));
5791 }
5792 }
5793 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5794 /* If return mode is void, this decl rtl should not be used. */
5795 DECL_RTL (DECL_RESULT (subr)) = 0;
5796 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5797 {
5798 /* If the function will end with cleanup code for parms,
5799 compute the return value into a pseudo reg,
5800 which we will copy into the true return register
5801 after the cleanups are done. */
5802
5803 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5804
5805 #ifdef PROMOTE_FUNCTION_RETURN
5806 tree type = TREE_TYPE (DECL_RESULT (subr));
5807 int unsignedp = TREE_UNSIGNED (type);
5808
5809 mode = promote_mode (type, mode, &unsignedp, 1);
5810 #endif
5811
5812 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5813 }
5814 else
5815 /* Scalar, returned in a register. */
5816 {
5817 #ifdef FUNCTION_OUTGOING_VALUE
5818 DECL_RTL (DECL_RESULT (subr))
5819 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5820 #else
5821 DECL_RTL (DECL_RESULT (subr))
5822 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5823 #endif
5824
5825 /* Mark this reg as the function's return value. */
5826 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5827 {
5828 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5829 /* Needed because we may need to move this to memory
5830 in case it's a named return value whose address is taken. */
5831 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5832 }
5833 }
5834
5835 /* Initialize rtx for parameters and local variables.
5836 In some cases this requires emitting insns. */
5837
5838 assign_parms (subr, 0);
5839
5840 /* Copy the static chain now if it wasn't a register. The delay is to
5841 avoid conflicts with the parameter passing registers. */
5842
5843 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5844 if (GET_CODE (static_chain_incoming_rtx) != REG)
5845 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5846
5847 /* The following was moved from init_function_start.
5848 The move is supposed to make sdb output more accurate. */
5849 /* Indicate the beginning of the function body,
5850 as opposed to parm setup. */
5851 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5852
5853 /* If doing stupid allocation, mark parms as born here. */
5854
5855 if (GET_CODE (get_last_insn ()) != NOTE)
5856 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5857 parm_birth_insn = get_last_insn ();
5858
5859 if (obey_regdecls)
5860 {
5861 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5862 use_variable (regno_reg_rtx[i]);
5863
5864 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5865 use_variable (current_function_internal_arg_pointer);
5866 }
5867
5868 context_display = 0;
5869 if (current_function_needs_context)
5870 {
5871 /* Fetch static chain values for containing functions. */
5872 tem = decl_function_context (current_function_decl);
5873 /* If not doing stupid register allocation, copy the static chain
5874 pointer into a pseudo. If we have small register classes, copy
5875 the value from memory if static_chain_incoming_rtx is a REG. If
5876 we do stupid register allocation, we use the stack address
5877 generated above. */
5878 if (tem && ! obey_regdecls)
5879 {
5880 /* If the static chain originally came in a register, put it back
5881 there, then move it out in the next insn. The reason for
5882 this peculiar code is to satisfy function integration. */
5883 if (SMALL_REGISTER_CLASSES
5884 && GET_CODE (static_chain_incoming_rtx) == REG)
5885 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5886 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5887 }
5888
5889 while (tem)
5890 {
5891 tree rtlexp = make_node (RTL_EXPR);
5892
5893 RTL_EXPR_RTL (rtlexp) = last_ptr;
5894 context_display = tree_cons (tem, rtlexp, context_display);
5895 tem = decl_function_context (tem);
5896 if (tem == 0)
5897 break;
5898 /* Chain thru stack frames, assuming pointer to next lexical frame
5899 is found at the place we always store it. */
5900 #ifdef FRAME_GROWS_DOWNWARD
5901 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5902 #endif
5903 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5904 memory_address (Pmode, last_ptr)));
5905
5906 /* If we are not optimizing, ensure that we know that this
5907 piece of context is live over the entire function. */
5908 if (! optimize)
5909 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5910 save_expr_regs);
5911 }
5912 }
5913
5914 if (current_function_instrument_entry_exit)
5915 {
5916 rtx fun = DECL_RTL (current_function_decl);
5917 if (GET_CODE (fun) == MEM)
5918 fun = XEXP (fun, 0);
5919 else
5920 abort ();
5921 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5922 fun, Pmode,
5923 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5924 0,
5925 hard_frame_pointer_rtx),
5926 Pmode);
5927 }
5928
5929 /* After the display initializations is where the tail-recursion label
5930 should go, if we end up needing one. Ensure we have a NOTE here
5931 since some things (like trampolines) get placed before this. */
5932 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5933
5934 /* Evaluate now the sizes of any types declared among the arguments. */
5935 /* Now evaluate the sizes of any types declared among the arguments. */
5936 {
5937 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5938 EXPAND_MEMORY_USE_BAD);
5939 /* Flush the queue in case this parameter declaration has
5940 side-effects. */
5941 emit_queue ();
5942 }
5943
5944 /* Make sure there is a line number after the function entry setup code. */
5945 force_next_line_note ();
5946 }
5947 \f
5948 /* Generate RTL for the end of the current function.
5949 FILENAME and LINE are the current position in the source file.
5950
5951 It is up to language-specific callers to do cleanups for parameters--
5952 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5953
5954 void
5955 expand_function_end (filename, line, end_bindings)
5956 char *filename;
5957 int line;
5958 int end_bindings;
5959 {
5960 register int i;
5961 tree link;
5962
5963 #ifdef TRAMPOLINE_TEMPLATE
5964 static rtx initial_trampoline;
5965 #endif
5966
5967 #ifdef NON_SAVING_SETJMP
5968 /* Don't put any variables in registers if we call setjmp
5969 on a machine that fails to restore the registers. */
5970 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5971 {
5972 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5973 setjmp_protect (DECL_INITIAL (current_function_decl));
5974
5975 setjmp_protect_args ();
5976 }
5977 #endif
5978
5979 /* Save the argument pointer if a save area was made for it. */
5980 if (arg_pointer_save_area)
5981 {
5982 /* arg_pointer_save_area may not be a valid memory address, so we
5983 have to check it and fix it if necessary. */
5984 rtx seq;
5985 start_sequence ();
5986 emit_move_insn (validize_mem (arg_pointer_save_area),
5987 virtual_incoming_args_rtx);
5988 seq = gen_sequence ();
5989 end_sequence ();
5990 emit_insn_before (seq, tail_recursion_reentry);
5991 }
5992
5993 /* Initialize any trampolines required by this function. */
5994 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5995 {
5996 tree function = TREE_PURPOSE (link);
5997 rtx context = lookup_static_chain (function);
5998 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5999 #ifdef TRAMPOLINE_TEMPLATE
6000 rtx blktramp;
6001 #endif
6002 rtx seq;
6003
6004 #ifdef TRAMPOLINE_TEMPLATE
6005 /* First make sure this compilation has a template for
6006 initializing trampolines. */
6007 if (initial_trampoline == 0)
6008 {
6009 end_temporary_allocation ();
6010 initial_trampoline
6011 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6012 resume_temporary_allocation ();
6013 }
6014 #endif
6015
6016 /* Generate insns to initialize the trampoline. */
6017 start_sequence ();
6018 tramp = round_trampoline_addr (XEXP (tramp, 0));
6019 #ifdef TRAMPOLINE_TEMPLATE
6020 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6021 emit_block_move (blktramp, initial_trampoline,
6022 GEN_INT (TRAMPOLINE_SIZE),
6023 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6024 #endif
6025 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6026 seq = get_insns ();
6027 end_sequence ();
6028
6029 /* Put those insns at entry to the containing function (this one). */
6030 emit_insns_before (seq, tail_recursion_reentry);
6031 }
6032
6033 /* If we are doing stack checking and this function makes calls,
6034 do a stack probe at the start of the function to ensure we have enough
6035 space for another stack frame. */
6036 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6037 {
6038 rtx insn, seq;
6039
6040 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6041 if (GET_CODE (insn) == CALL_INSN)
6042 {
6043 start_sequence ();
6044 probe_stack_range (STACK_CHECK_PROTECT,
6045 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6046 seq = get_insns ();
6047 end_sequence ();
6048 emit_insns_before (seq, tail_recursion_reentry);
6049 break;
6050 }
6051 }
6052
6053 /* Warn about unused parms if extra warnings were specified. */
6054 if (warn_unused && extra_warnings)
6055 {
6056 tree decl;
6057
6058 for (decl = DECL_ARGUMENTS (current_function_decl);
6059 decl; decl = TREE_CHAIN (decl))
6060 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6061 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6062 warning_with_decl (decl, "unused parameter `%s'");
6063 }
6064
6065 /* Delete handlers for nonlocal gotos if nothing uses them. */
6066 if (nonlocal_goto_handler_slots != 0
6067 && ! current_function_has_nonlocal_label)
6068 delete_handlers ();
6069
6070 /* End any sequences that failed to be closed due to syntax errors. */
6071 while (in_sequence_p ())
6072 end_sequence ();
6073
6074 /* Outside function body, can't compute type's actual size
6075 until next function's body starts. */
6076 immediate_size_expand--;
6077
6078 /* If doing stupid register allocation,
6079 mark register parms as dying here. */
6080
6081 if (obey_regdecls)
6082 {
6083 rtx tem;
6084 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6085 use_variable (regno_reg_rtx[i]);
6086
6087 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6088
6089 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6090 {
6091 use_variable (XEXP (tem, 0));
6092 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6093 }
6094
6095 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6096 use_variable (current_function_internal_arg_pointer);
6097 }
6098
6099 clear_pending_stack_adjust ();
6100 do_pending_stack_adjust ();
6101
6102 /* Mark the end of the function body.
6103 If control reaches this insn, the function can drop through
6104 without returning a value. */
6105 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6106
6107 /* Must mark the last line number note in the function, so that the test
6108 coverage code can avoid counting the last line twice. This just tells
6109 the code to ignore the immediately following line note, since there
6110 already exists a copy of this note somewhere above. This line number
6111 note is still needed for debugging though, so we can't delete it. */
6112 if (flag_test_coverage)
6113 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6114
6115 /* Output a linenumber for the end of the function.
6116 SDB depends on this. */
6117 emit_line_note_force (filename, line);
6118
6119 /* Output the label for the actual return from the function,
6120 if one is expected. This happens either because a function epilogue
6121 is used instead of a return instruction, or because a return was done
6122 with a goto in order to run local cleanups, or because of pcc-style
6123 structure returning. */
6124
6125 if (return_label)
6126 emit_label (return_label);
6127
6128 /* C++ uses this. */
6129 if (end_bindings)
6130 expand_end_bindings (0, 0, 0);
6131
6132 /* Now handle any leftover exception regions that may have been
6133 created for the parameters. */
6134 {
6135 rtx last = get_last_insn ();
6136 rtx label;
6137
6138 expand_leftover_cleanups ();
6139
6140 /* If the above emitted any code, make sure we jump around it. */
6141 if (last != get_last_insn ())
6142 {
6143 label = gen_label_rtx ();
6144 last = emit_jump_insn_after (gen_jump (label), last);
6145 last = emit_barrier_after (last);
6146 emit_label (label);
6147 }
6148 }
6149
6150 if (current_function_instrument_entry_exit)
6151 {
6152 rtx fun = DECL_RTL (current_function_decl);
6153 if (GET_CODE (fun) == MEM)
6154 fun = XEXP (fun, 0);
6155 else
6156 abort ();
6157 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6158 fun, Pmode,
6159 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6160 0,
6161 hard_frame_pointer_rtx),
6162 Pmode);
6163 }
6164
6165 /* If we had calls to alloca, and this machine needs
6166 an accurate stack pointer to exit the function,
6167 insert some code to save and restore the stack pointer. */
6168 #ifdef EXIT_IGNORE_STACK
6169 if (! EXIT_IGNORE_STACK)
6170 #endif
6171 if (current_function_calls_alloca)
6172 {
6173 rtx tem = 0;
6174
6175 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6176 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6177 }
6178
6179 /* If scalar return value was computed in a pseudo-reg,
6180 copy that to the hard return register. */
6181 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6182 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6183 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6184 >= FIRST_PSEUDO_REGISTER))
6185 {
6186 rtx real_decl_result;
6187
6188 #ifdef FUNCTION_OUTGOING_VALUE
6189 real_decl_result
6190 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6191 current_function_decl);
6192 #else
6193 real_decl_result
6194 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6195 current_function_decl);
6196 #endif
6197 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6198 /* If this is a BLKmode structure being returned in registers, then use
6199 the mode computed in expand_return. */
6200 if (GET_MODE (real_decl_result) == BLKmode)
6201 PUT_MODE (real_decl_result,
6202 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6203 emit_move_insn (real_decl_result,
6204 DECL_RTL (DECL_RESULT (current_function_decl)));
6205 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6206
6207 /* The delay slot scheduler assumes that current_function_return_rtx
6208 holds the hard register containing the return value, not a temporary
6209 pseudo. */
6210 current_function_return_rtx = real_decl_result;
6211 }
6212
6213 /* If returning a structure, arrange to return the address of the value
6214 in a place where debuggers expect to find it.
6215
6216 If returning a structure PCC style,
6217 the caller also depends on this value.
6218 And current_function_returns_pcc_struct is not necessarily set. */
6219 if (current_function_returns_struct
6220 || current_function_returns_pcc_struct)
6221 {
6222 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6223 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6224 #ifdef FUNCTION_OUTGOING_VALUE
6225 rtx outgoing
6226 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6227 current_function_decl);
6228 #else
6229 rtx outgoing
6230 = FUNCTION_VALUE (build_pointer_type (type),
6231 current_function_decl);
6232 #endif
6233
6234 /* Mark this as a function return value so integrate will delete the
6235 assignment and USE below when inlining this function. */
6236 REG_FUNCTION_VALUE_P (outgoing) = 1;
6237
6238 emit_move_insn (outgoing, value_address);
6239 use_variable (outgoing);
6240 }
6241
6242 /* If this is an implementation of __throw, do what's necessary to
6243 communicate between __builtin_eh_return and the epilogue. */
6244 expand_eh_return ();
6245
6246 /* Output a return insn if we are using one.
6247 Otherwise, let the rtl chain end here, to drop through
6248 into the epilogue. */
6249
6250 #ifdef HAVE_return
6251 if (HAVE_return)
6252 {
6253 emit_jump_insn (gen_return ());
6254 emit_barrier ();
6255 }
6256 #endif
6257
6258 /* Fix up any gotos that jumped out to the outermost
6259 binding level of the function.
6260 Must follow emitting RETURN_LABEL. */
6261
6262 /* If you have any cleanups to do at this point,
6263 and they need to create temporary variables,
6264 then you will lose. */
6265 expand_fixups (get_insns ());
6266 }
6267 \f
6268 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6269
6270 static int *prologue;
6271 static int *epilogue;
6272
6273 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6274 or a single insn). */
6275
6276 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6277 static int *
6278 record_insns (insns)
6279 rtx insns;
6280 {
6281 int *vec;
6282
6283 if (GET_CODE (insns) == SEQUENCE)
6284 {
6285 int len = XVECLEN (insns, 0);
6286 vec = (int *) oballoc ((len + 1) * sizeof (int));
6287 vec[len] = 0;
6288 while (--len >= 0)
6289 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6290 }
6291 else
6292 {
6293 vec = (int *) oballoc (2 * sizeof (int));
6294 vec[0] = INSN_UID (insns);
6295 vec[1] = 0;
6296 }
6297 return vec;
6298 }
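
/* A rough note (not from the original file): the vector built above is
   zero-terminated, so contains () below can scan it without a separate
   length, as in `for (j = 0; vec[j]; j++)'.  This relies on real insns
   never carrying INSN_UID 0.  */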
6299
6300 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6301
6302 static int
6303 contains (insn, vec)
6304 rtx insn;
6305 int *vec;
6306 {
6307 register int i, j;
6308
6309 if (GET_CODE (insn) == INSN
6310 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6311 {
6312 int count = 0;
6313 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6314 for (j = 0; vec[j]; j++)
6315 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6316 count++;
6317 return count;
6318 }
6319 else
6320 {
6321 for (j = 0; vec[j]; j++)
6322 if (INSN_UID (insn) == vec[j])
6323 return 1;
6324 }
6325 return 0;
6326 }
6327 #endif /* HAVE_prologue || HAVE_epilogue */
6328
6329 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6330 this into place with notes indicating where the prologue ends and where
6331 the epilogue begins. Update the basic block information when possible. */
6332
6333 void
6334 thread_prologue_and_epilogue_insns (f)
6335 rtx f ATTRIBUTE_UNUSED;
6336 {
6337 #ifdef HAVE_prologue
6338 if (HAVE_prologue)
6339 {
6340 rtx head, seq;
6341
6342 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6343 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6344 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6345 seq = gen_prologue ();
6346 head = emit_insn_after (seq, f);
6347
6348 /* Include the new prologue insns in the first block. Ignore them
6349 if they form a basic block unto themselves. */
6350 if (x_basic_block_head && n_basic_blocks
6351 && GET_CODE (BLOCK_HEAD (0)) != CODE_LABEL)
6352 BLOCK_HEAD (0) = NEXT_INSN (f);
6353
6354 /* Retain a map of the prologue insns. */
6355 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6356 }
6357 else
6358 #endif
6359 prologue = 0;
6360
6361 #ifdef HAVE_epilogue
6362 if (HAVE_epilogue)
6363 {
6364 rtx insn = get_last_insn ();
6365 rtx prev = prev_nonnote_insn (insn);
6366
6367 /* If we end with a BARRIER, we don't need an epilogue. */
6368 if (! (prev && GET_CODE (prev) == BARRIER))
6369 {
6370 rtx tail, seq, tem;
6371 rtx first_use = 0;
6372 rtx last_use = 0;
6373
6374 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6375 epilogue insns, the USE insns at the end of a function,
6376 the jump insn that returns, and then a BARRIER. */
6377
6378 /* Move the USE insns at the end of a function onto a list. */
6379 while (prev
6380 && GET_CODE (prev) == INSN
6381 && GET_CODE (PATTERN (prev)) == USE)
6382 {
6383 tem = prev;
6384 prev = prev_nonnote_insn (prev);
6385
6386 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6387 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6388 if (first_use)
6389 {
6390 NEXT_INSN (tem) = first_use;
6391 PREV_INSN (first_use) = tem;
6392 }
6393 first_use = tem;
6394 if (!last_use)
6395 last_use = tem;
6396 }
6397
6398 emit_barrier_after (insn);
6399
6400 seq = gen_epilogue ();
6401 tail = emit_jump_insn_after (seq, insn);
6402
6403 /* Insert the USE insns immediately before the return insn, which
6404 must be the first instruction before the final barrier. */
6405 if (first_use)
6406 {
6407 tem = prev_nonnote_insn (get_last_insn ());
6408 NEXT_INSN (PREV_INSN (tem)) = first_use;
6409 PREV_INSN (first_use) = PREV_INSN (tem);
6410 PREV_INSN (tem) = last_use;
6411 NEXT_INSN (last_use) = tem;
6412 }
6413
6414 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6415
6416 /* Include the new epilogue insns in the last block. Ignore
6417 them if they form a basic block unto themselves. */
6418 if (x_basic_block_end && n_basic_blocks
6419 && GET_CODE (BLOCK_END (n_basic_blocks - 1)) != JUMP_INSN)
6420 BLOCK_END (n_basic_blocks - 1) = tail;
6421
6422 /* Retain a map of the epilogue insns. */
6423 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6424 return;
6425 }
6426 }
6427 #endif
6428 epilogue = 0;
6429 }
6430
6431 /* Reposition the prologue-end and epilogue-begin notes after instruction
6432 scheduling and delayed branch scheduling. */
6433
6434 void
6435 reposition_prologue_and_epilogue_notes (f)
6436 rtx f ATTRIBUTE_UNUSED;
6437 {
6438 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6439 /* Reposition the prologue and epilogue notes. */
6440 if (n_basic_blocks)
6441 {
6442 rtx next, prev;
6443 int len;
6444
6445 if (prologue)
6446 {
6447 register rtx insn, note = 0;
6448
6449 /* Scan from the beginning until we reach the last prologue insn.
6450 We apparently can't depend on basic_block_{head,end} after
6451 reorg has run. */
6452 for (len = 0; prologue[len]; len++)
6453 ;
6454 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6455 {
6456 if (GET_CODE (insn) == NOTE)
6457 {
6458 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6459 note = insn;
6460 }
6461 else if ((len -= contains (insn, prologue)) == 0)
6462 {
6463 /* Find the prologue-end note if we haven't already, and
6464 move it to just after the last prologue insn. */
6465 if (note == 0)
6466 {
6467 for (note = insn; (note = NEXT_INSN (note));)
6468 if (GET_CODE (note) == NOTE
6469 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6470 break;
6471 }
6472
6473 next = NEXT_INSN (note);
6474 prev = PREV_INSN (note);
6475 if (prev)
6476 NEXT_INSN (prev) = next;
6477 if (next)
6478 PREV_INSN (next) = prev;
6479
6480 /* Whether or not we can depend on BLOCK_HEAD,
6481 attempt to keep it up-to-date. */
6482 if (BLOCK_HEAD (0) == note)
6483 BLOCK_HEAD (0) = next;
6484
6485 add_insn_after (note, insn);
6486 }
6487 }
6488 }
6489
6490 if (epilogue)
6491 {
6492 register rtx insn, note = 0;
6493
6494 /* Scan from the end until we reach the first epilogue insn.
6495 We apparently can't depend on basic_block_{head,end} after
6496 reorg has run. */
6497 for (len = 0; epilogue[len]; len++)
6498 ;
6499 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6500 {
6501 if (GET_CODE (insn) == NOTE)
6502 {
6503 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6504 note = insn;
6505 }
6506 else if ((len -= contains (insn, epilogue)) == 0)
6507 {
6508 /* Find the epilogue-begin note if we haven't already, and
6509 move it to just before the first epilogue insn. */
6510 if (note == 0)
6511 {
6512 for (note = insn; (note = PREV_INSN (note));)
6513 if (GET_CODE (note) == NOTE
6514 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6515 break;
6516 }
6517 next = NEXT_INSN (note);
6518 prev = PREV_INSN (note);
6519 if (prev)
6520 NEXT_INSN (prev) = next;
6521 if (next)
6522 PREV_INSN (next) = prev;
6523
6524 /* Whether or not we can depend on BLOCK_HEAD,
6525 attempt to keep it up-to-date. */
6526 if (n_basic_blocks
6527 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6528 BLOCK_HEAD (n_basic_blocks-1) = note;
6529
6530 add_insn_before (note, insn);
6531 }
6532 }
6533 }
6534 }
6535 #endif /* HAVE_prologue or HAVE_epilogue */
6536 }