/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest integer that is a multiple of the
   required alignment and is not greater than the value.  Avoid using
   division in case the value is negative.  Assume the alignment is a
   power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the nearest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
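
/* Illustrative note (not part of the original source): with ALIGN == 8,
   CEIL_ROUND (5, 8) is (5 + 7) & ~7 == 8, while FLOOR_ROUND (-9, 8) is
   (-9) & ~7 == -16 on a two's-complement machine.  Masking instead of
   dividing is what keeps the negative case well defined.  */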

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
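
/* Usage sketch (illustrative, not from the original source): a caller
   wanting a word-sized spill slot with mode-derived alignment would write

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   and get back a MEM addressed off virtual_stack_vars_rtx, or off the
   frame pointer once virtuals_instantiated is set.  */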

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
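
/* Usage sketch (illustrative, not from the original source): a typical
   statement-lifetime temporary is

	rtx t = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);

   which the next call to free_temp_slots reclaims.  KEEP == 1 makes the
   slot survive free_temp_slots, and KEEP == 2 files it under
   target_temp_slot_level as described above.  */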
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
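
/* Note (illustrative, not from the original source): for a type whose
   TYPE_MODE is BLKmode, or with MEMORY_REQUIRED == 1,

	rtx t = assign_temp (type, 0, 1, 0);

   takes the memory path above and marks the MEM with MEM_IN_STRUCT_P
   when the type is an aggregate; otherwise the result is a (possibly
   promoted) pseudo register.  */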
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
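
/* Worked example (illustrative, not from the original source): two free
   BLKmode slots recorded as { base_offset 32, full_size 16 } and
   { base_offset 48, full_size 16 } satisfy
   p->base_offset + p->full_size == q->base_offset, so the loop above
   merges them into a single 32-byte slot at base_offset 32.  */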
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
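
/* Example (illustrative, not from the original source): if a slot's
   address is copied into a fresh pseudo, say via

	rtx addr = copy_to_reg (XEXP (p->slot, 0));

   then update_temp_slot_address (XEXP (p->slot, 0), addr) records ADDR
   as an alias, chaining further aliases through an EXPR_LIST so that
   find_temp_slot_from_address still identifies the slot.  */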

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
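
/* Lifecycle sketch (illustrative, not from the original source):

	push_temp_slots ();
	... expand code that calls assign_stack_temp ...
	pop_temp_slots ();

   frees every temporary made in between that is not tied to a pending
   RTL_EXPR; preserve_temp_slots can first hoist a result's slot to the
   enclosing level so it survives the pop.  */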

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
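
/* Note (illustrative, not from the original source): PUT_CODE above
   rewrites the pseudo's rtx in place, so every insn already sharing
   that rtx now sees a MEM for the stack slot; the fixup_var_refs pass
   then repairs any use that is not valid in memory form.  */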
1473 \f
1474 static void
1475 fixup_var_refs (var, promoted_mode, unsignedp)
1476 rtx var;
1477 enum machine_mode promoted_mode;
1478 int unsignedp;
1479 {
1480 tree pending;
1481 rtx first_insn = get_insns ();
1482 struct sequence_stack *stack = sequence_stack;
1483 tree rtl_exps = rtl_expr_chain;
1484
1485 /* Must scan all insns for stack-refs that exceed the limit. */
1486 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1487
1488 /* Scan all pending sequences too. */
1489 for (; stack; stack = stack->next)
1490 {
1491 push_to_sequence (stack->first);
1492 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1493 stack->first, stack->next != 0);
1494 /* Update remembered end of sequence
1495 in case we added an insn at the end. */
1496 stack->last = get_last_insn ();
1497 end_sequence ();
1498 }
1499
1500 /* Scan all waiting RTL_EXPRs too. */
1501 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1502 {
1503 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1504 if (seq != const0_rtx && seq != 0)
1505 {
1506 push_to_sequence (seq);
1507 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1508 end_sequence ();
1509 }
1510 }
1511 }
1512 \f
1513 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1514 some part of an insn. Return a struct fixup_replacement whose OLD
1515 value is equal to X. Allocate a new structure if no such entry exists. */
1516
1517 static struct fixup_replacement *
1518 find_fixup_replacement (replacements, x)
1519 struct fixup_replacement **replacements;
1520 rtx x;
1521 {
1522 struct fixup_replacement *p;
1523
1524 /* See if we have already replaced this. */
1525 for (p = *replacements; p && p->old != x; p = p->next)
1526 ;
1527
1528 if (p == 0)
1529 {
1530 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1531 p->old = x;
1532 p->new = 0;
1533 p->next = *replacements;
1534 *replacements = p;
1535 }
1536
1537 return p;
1538 }
1539
1540 /* Scan the insn-chain starting with INSN for refs to VAR
1541 and fix them up. TOPLEVEL is nonzero if this chain is the
1542 main chain of insns for the current function. */
1543
1544 static void
1545 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1546 rtx var;
1547 enum machine_mode promoted_mode;
1548 int unsignedp;
1549 rtx insn;
1550 int toplevel;
1551 {
1552 rtx call_dest = 0;
1553
1554 while (insn)
1555 {
1556 rtx next = NEXT_INSN (insn);
1557 rtx note;
1558 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1559 {
1560 /* If this is a CLOBBER of VAR, delete it.
1561
1562 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1563 and REG_RETVAL notes too. */
1564 if (GET_CODE (PATTERN (insn)) == CLOBBER
1565 && XEXP (PATTERN (insn), 0) == var)
1566 {
1567 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1568 /* The REG_LIBCALL note will go away since we are going to
1569 turn INSN into a NOTE, so just delete the
1570 corresponding REG_RETVAL note. */
1571 remove_note (XEXP (note, 0),
1572 find_reg_note (XEXP (note, 0), REG_RETVAL,
1573 NULL_RTX));
1574
1575 /* In unoptimized compilation, we shouldn't call delete_insn
1576 except in jump.c doing warnings. */
1577 PUT_CODE (insn, NOTE);
1578 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1579 NOTE_SOURCE_FILE (insn) = 0;
1580 }
1581
1582 /* The insn to load VAR from a home in the arglist
1583 is now a no-op. When we see it, just delete it. */
1584 else if (toplevel
1585 && GET_CODE (PATTERN (insn)) == SET
1586 && SET_DEST (PATTERN (insn)) == var
1587 /* If this represents the result of an insn group,
1588 don't delete the insn. */
1589 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1590 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1591 {
1592 /* In unoptimized compilation, we shouldn't call delete_insn
1593 except in jump.c doing warnings. */
1594 PUT_CODE (insn, NOTE);
1595 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1596 NOTE_SOURCE_FILE (insn) = 0;
1597 if (insn == last_parm_insn)
1598 last_parm_insn = PREV_INSN (next);
1599 }
1600 else
1601 {
1602 struct fixup_replacement *replacements = 0;
1603 rtx next_insn = NEXT_INSN (insn);
1604
1605 if (SMALL_REGISTER_CLASSES)
1606 {
1607 /* If the insn that copies the results of a CALL_INSN
1608 into a pseudo now references VAR, we have to use an
1609 intermediate pseudo since we want the life of the
1610 return value register to be only a single insn.
1611
1612 If we don't use an intermediate pseudo, such things as
1613 address computations to make the address of VAR valid
1614 if it is not can be placed between the CALL_INSN and INSN.
1615
1616 To make sure this doesn't happen, we record the destination
1617 of the CALL_INSN and see if the next insn uses both that
1618 and VAR. */
1619
1620 if (call_dest != 0 && GET_CODE (insn) == INSN
1621 && reg_mentioned_p (var, PATTERN (insn))
1622 && reg_mentioned_p (call_dest, PATTERN (insn)))
1623 {
1624 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1625
1626 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1627
1628 PATTERN (insn) = replace_rtx (PATTERN (insn),
1629 call_dest, temp);
1630 }
1631
1632 if (GET_CODE (insn) == CALL_INSN
1633 && GET_CODE (PATTERN (insn)) == SET)
1634 call_dest = SET_DEST (PATTERN (insn));
1635 else if (GET_CODE (insn) == CALL_INSN
1636 && GET_CODE (PATTERN (insn)) == PARALLEL
1637 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1638 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1639 else
1640 call_dest = 0;
1641 }
1642
1643 /* See if we have to do anything to INSN now that VAR is in
1644 memory. If it needs to be loaded into a pseudo, use a single
1645 pseudo for the entire insn in case there is a MATCH_DUP
1646 between two operands. We pass a pointer to the head of
1647 a list of struct fixup_replacements. If fixup_var_refs_1
1648 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1649 it will record them in this list.
1650
1651 If it allocated a pseudo for any replacement, we copy into
1652 it here. */
1653
1654 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1655 &replacements);
1656
1657 /* If this is last_parm_insn, and any instructions were output
1658 after it to fix it up, then we must set last_parm_insn to
1659 the last such instruction emitted. */
1660 if (insn == last_parm_insn)
1661 last_parm_insn = PREV_INSN (next_insn);
1662
1663 while (replacements)
1664 {
1665 if (GET_CODE (replacements->new) == REG)
1666 {
1667 rtx insert_before;
1668 rtx seq;
1669
1670 /* OLD might be a (subreg (mem)). */
1671 if (GET_CODE (replacements->old) == SUBREG)
1672 replacements->old
1673 = fixup_memory_subreg (replacements->old, insn, 0);
1674 else
1675 replacements->old
1676 = fixup_stack_1 (replacements->old, insn);
1677
1678 insert_before = insn;
1679
1680 /* If we are changing the mode, do a conversion.
1681 This might be wasteful, but combine.c will
1682 eliminate much of the waste. */
1683
1684 if (GET_MODE (replacements->new)
1685 != GET_MODE (replacements->old))
1686 {
1687 start_sequence ();
1688 convert_move (replacements->new,
1689 replacements->old, unsignedp);
1690 seq = gen_sequence ();
1691 end_sequence ();
1692 }
1693 else
1694 seq = gen_move_insn (replacements->new,
1695 replacements->old);
1696
1697 emit_insn_before (seq, insert_before);
1698 }
1699
1700 replacements = replacements->next;
1701 }
1702 }
1703
1704 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1705 But don't touch other insns referred to by reg-notes;
1706 we will get them elsewhere. */
1707 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1708 if (GET_CODE (note) != INSN_LIST)
1709 XEXP (note, 0)
1710 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1711 }
1712 insn = next;
1713 }
1714 }
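/* An illustrative example of the SMALL_REGISTER_CLASSES fixup above,
   assuming a target whose return-value register is rv:

       (call_insn (set (reg rv) (call ...)))
       (insn (set (mem VAR) (reg rv)))

   Making VAR's address valid may require emitting insns just before
   the second insn, which would land between the call and the use of
   rv and stretch rv's lifetime.  So we first copy rv into a fresh
   pseudo and rewrite the user:

       (insn (set (reg tmp) (reg rv)))
       (insn (set (mem VAR) (reg tmp)))  */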
1715 \f
1716 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1717 See if the rtx expression at *LOC in INSN needs to be changed.
1718
1719 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1720 contain a list of original rtx's and replacements. If we find that we need
1721 to modify this insn by replacing a memory reference with a pseudo or by
1722 making a new MEM to implement a SUBREG, we consult that list to see if
1723 we have already chosen a replacement. If none has already been allocated,
1724 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1725 or the SUBREG, as appropriate, to the pseudo. */
1726
1727 static void
1728 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1729 register rtx var;
1730 enum machine_mode promoted_mode;
1731 register rtx *loc;
1732 rtx insn;
1733 struct fixup_replacement **replacements;
1734 {
1735 register int i;
1736 register rtx x = *loc;
1737 RTX_CODE code = GET_CODE (x);
1738 register char *fmt;
1739 register rtx tem, tem1;
1740 struct fixup_replacement *replacement;
1741
1742 switch (code)
1743 {
1744 case ADDRESSOF:
1745 if (XEXP (x, 0) == var)
1746 {
1747 /* Prevent sharing of rtl that might lose. */
1748 rtx sub = copy_rtx (XEXP (var, 0));
1749
1750 start_sequence ();
1751
1752 if (! validate_change (insn, loc, sub, 0))
1753 {
1754 rtx y = force_operand (sub, NULL_RTX);
1755
1756 if (! validate_change (insn, loc, y, 0))
1757 *loc = copy_to_reg (y);
1758 }
1759
1760 emit_insn_before (gen_sequence (), insn);
1761 end_sequence ();
1762 }
1763 return;
1764
1765 case MEM:
1766 if (var == x)
1767 {
1768 /* If we already have a replacement, use it. Otherwise,
1769 try to fix up this address in case it is invalid. */
1770
1771 replacement = find_fixup_replacement (replacements, var);
1772 if (replacement->new)
1773 {
1774 *loc = replacement->new;
1775 return;
1776 }
1777
1778 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1779
1780 /* Unless we are forcing memory to register or we changed the mode,
1781 we can leave things the way they are if the insn is valid. */
1782
1783 INSN_CODE (insn) = -1;
1784 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1785 && recog_memoized (insn) >= 0)
1786 return;
1787
1788 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1789 return;
1790 }
1791
1792 /* If X contains VAR, we need to unshare it here so that we update
1793 each occurrence separately. But all identical MEMs in one insn
1794 must be replaced with the same rtx because of the possibility of
1795 MATCH_DUPs. */
1796
1797 if (reg_mentioned_p (var, x))
1798 {
1799 replacement = find_fixup_replacement (replacements, x);
1800 if (replacement->new == 0)
1801 replacement->new = copy_most_rtx (x, var);
1802
1803 *loc = x = replacement->new;
1804 }
1805 break;
1806
1807 case REG:
1808 case CC0:
1809 case PC:
1810 case CONST_INT:
1811 case CONST:
1812 case SYMBOL_REF:
1813 case LABEL_REF:
1814 case CONST_DOUBLE:
1815 return;
1816
1817 case SIGN_EXTRACT:
1818 case ZERO_EXTRACT:
1819 /* Note that in some cases those types of expressions are altered
1820 by optimize_bit_field, and do not survive to get here. */
1821 if (XEXP (x, 0) == var
1822 || (GET_CODE (XEXP (x, 0)) == SUBREG
1823 && SUBREG_REG (XEXP (x, 0)) == var))
1824 {
1825 /* Get TEM as a valid MEM in the mode presently in the insn.
1826
1827 We don't worry about the possibility of MATCH_DUP here; it
1828 is highly unlikely and would be tricky to handle. */
1829
1830 tem = XEXP (x, 0);
1831 if (GET_CODE (tem) == SUBREG)
1832 {
1833 if (GET_MODE_BITSIZE (GET_MODE (tem))
1834 > GET_MODE_BITSIZE (GET_MODE (var)))
1835 {
1836 replacement = find_fixup_replacement (replacements, var);
1837 if (replacement->new == 0)
1838 replacement->new = gen_reg_rtx (GET_MODE (var));
1839 SUBREG_REG (tem) = replacement->new;
1840 }
1841 else
1842 tem = fixup_memory_subreg (tem, insn, 0);
1843 }
1844 else
1845 tem = fixup_stack_1 (tem, insn);
1846
1847 /* Unless we want to load from memory, get TEM into the proper mode
1848 for an extract from memory. This can only be done if the
1849 extract is at a constant position and length. */
1850
1851 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1852 && GET_CODE (XEXP (x, 2)) == CONST_INT
1853 && ! mode_dependent_address_p (XEXP (tem, 0))
1854 && ! MEM_VOLATILE_P (tem))
1855 {
1856 enum machine_mode wanted_mode = VOIDmode;
1857 enum machine_mode is_mode = GET_MODE (tem);
1858 int pos = INTVAL (XEXP (x, 2));
1859
1860 #ifdef HAVE_extzv
1861 if (GET_CODE (x) == ZERO_EXTRACT)
1862 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1863 #endif
1864 #ifdef HAVE_extv
1865 if (GET_CODE (x) == SIGN_EXTRACT)
1866 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1867 #endif
1868 /* If we have a narrower mode, we can do something. */
1869 if (wanted_mode != VOIDmode
1870 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1871 {
1872 int offset = pos / BITS_PER_UNIT;
1873 rtx old_pos = XEXP (x, 2);
1874 rtx newmem;
1875
1876 /* If the bytes and bits are counted differently, we
1877 must adjust the offset. */
1878 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1879 offset = (GET_MODE_SIZE (is_mode)
1880 - GET_MODE_SIZE (wanted_mode) - offset);
1881
1882 pos %= GET_MODE_BITSIZE (wanted_mode);
1883
1884 newmem = gen_rtx_MEM (wanted_mode,
1885 plus_constant (XEXP (tem, 0), offset));
1886 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1887 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1888 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1889
1890 /* Make the change and see if the insn remains valid. */
1891 INSN_CODE (insn) = -1;
1892 XEXP (x, 0) = newmem;
1893 XEXP (x, 2) = GEN_INT (pos);
1894
1895 if (recog_memoized (insn) >= 0)
1896 return;
1897
1898 /* Otherwise, restore old position. XEXP (x, 0) will be
1899 restored later. */
1900 XEXP (x, 2) = old_pos;
1901 }
1902 }
1903
1904 /* If we get here, the bitfield extract insn can't accept a memory
1905 reference. Copy the input into a register. */
1906
1907 tem1 = gen_reg_rtx (GET_MODE (tem));
1908 emit_insn_before (gen_move_insn (tem1, tem), insn);
1909 XEXP (x, 0) = tem1;
1910 return;
1911 }
1912 break;
1913
1914 case SUBREG:
1915 if (SUBREG_REG (x) == var)
1916 {
1917 /* If this is a special SUBREG made because VAR was promoted
1918 from a wider mode, replace it with VAR and call ourself
1919 recursively, this time saying that the object previously
1920 had its current mode (by virtue of the SUBREG). */
1921
1922 if (SUBREG_PROMOTED_VAR_P (x))
1923 {
1924 *loc = var;
1925 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1926 return;
1927 }
1928
1929 /* If this SUBREG makes VAR wider, it has become a paradoxical
1930 SUBREG with VAR in memory, but these aren't allowed at this
1931 stage of the compilation. So load VAR into a pseudo and take
1932 a SUBREG of that pseudo. */
1933 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1934 {
1935 replacement = find_fixup_replacement (replacements, var);
1936 if (replacement->new == 0)
1937 replacement->new = gen_reg_rtx (GET_MODE (var));
1938 SUBREG_REG (x) = replacement->new;
1939 return;
1940 }
1941
1942 /* See if we have already found a replacement for this SUBREG.
1943 If so, use it. Otherwise, make a MEM and see if the insn
1944 is recognized. If not, or if we should force MEM into a register,
1945 make a pseudo for this SUBREG. */
1946 replacement = find_fixup_replacement (replacements, x);
1947 if (replacement->new)
1948 {
1949 *loc = replacement->new;
1950 return;
1951 }
1952
1953 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1954
1955 INSN_CODE (insn) = -1;
1956 if (! flag_force_mem && recog_memoized (insn) >= 0)
1957 return;
1958
1959 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1960 return;
1961 }
1962 break;
1963
1964 case SET:
1965 /* First do special simplification of bit-field references. */
1966 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1967 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1968 optimize_bit_field (x, insn, 0);
1969 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1970 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1971 optimize_bit_field (x, insn, NULL_PTR);
1972
1973 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1974 into a register and then store it back out. */
1975 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1976 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1977 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1978 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1979 > GET_MODE_SIZE (GET_MODE (var))))
1980 {
1981 replacement = find_fixup_replacement (replacements, var);
1982 if (replacement->new == 0)
1983 replacement->new = gen_reg_rtx (GET_MODE (var));
1984
1985 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1986 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1987 }
1988
1989 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1990 insn into a pseudo and store the low part of the pseudo into VAR. */
1991 if (GET_CODE (SET_DEST (x)) == SUBREG
1992 && SUBREG_REG (SET_DEST (x)) == var
1993 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1994 > GET_MODE_SIZE (GET_MODE (var))))
1995 {
1996 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1997 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1998 tem)),
1999 insn);
2000 break;
2001 }
2002
2003 {
2004 rtx dest = SET_DEST (x);
2005 rtx src = SET_SRC (x);
2006 #ifdef HAVE_insv
2007 rtx outerdest = dest;
2008 #endif
2009
2010 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2011 || GET_CODE (dest) == SIGN_EXTRACT
2012 || GET_CODE (dest) == ZERO_EXTRACT)
2013 dest = XEXP (dest, 0);
2014
2015 if (GET_CODE (src) == SUBREG)
2016 src = XEXP (src, 0);
2017
2018 /* If VAR does not appear at the top level of the SET,
2019 just scan the lower levels of the tree. */
2020
2021 if (src != var && dest != var)
2022 break;
2023
2024 /* We will need to rerecognize this insn. */
2025 INSN_CODE (insn) = -1;
2026
2027 #ifdef HAVE_insv
2028 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2029 {
2030 /* Since this case will return, ensure we fixup all the
2031 operands here. */
2032 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2033 insn, replacements);
2034 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2035 insn, replacements);
2036 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2037 insn, replacements);
2038
2039 tem = XEXP (outerdest, 0);
2040
2041 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2042 that may appear inside a ZERO_EXTRACT.
2043 This was legitimate when the MEM was a REG. */
2044 if (GET_CODE (tem) == SUBREG
2045 && SUBREG_REG (tem) == var)
2046 tem = fixup_memory_subreg (tem, insn, 0);
2047 else
2048 tem = fixup_stack_1 (tem, insn);
2049
2050 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2051 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2052 && ! mode_dependent_address_p (XEXP (tem, 0))
2053 && ! MEM_VOLATILE_P (tem))
2054 {
2055 enum machine_mode wanted_mode
2056 = insn_operand_mode[(int) CODE_FOR_insv][0];
2057 enum machine_mode is_mode = GET_MODE (tem);
2058 int pos = INTVAL (XEXP (outerdest, 2));
2059
2060 /* If we have a narrower mode, we can do something. */
2061 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2062 {
2063 int offset = pos / BITS_PER_UNIT;
2064 rtx old_pos = XEXP (outerdest, 2);
2065 rtx newmem;
2066
2067 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2068 offset = (GET_MODE_SIZE (is_mode)
2069 - GET_MODE_SIZE (wanted_mode) - offset);
2070
2071 pos %= GET_MODE_BITSIZE (wanted_mode);
2072
2073 newmem = gen_rtx_MEM (wanted_mode,
2074 plus_constant (XEXP (tem, 0), offset));
2075 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2076 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2077 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2078
2079 /* Make the change and see if the insn remains valid. */
2080 INSN_CODE (insn) = -1;
2081 XEXP (outerdest, 0) = newmem;
2082 XEXP (outerdest, 2) = GEN_INT (pos);
2083
2084 if (recog_memoized (insn) >= 0)
2085 return;
2086
2087 /* Otherwise, restore old position. XEXP (x, 0) will be
2088 restored later. */
2089 XEXP (outerdest, 2) = old_pos;
2090 }
2091 }
2092
2093 /* If we get here, the bit-field store doesn't allow memory
2094 or isn't located at a constant position. Load the value into
2095 a register, do the store, and put it back into memory. */
2096
2097 tem1 = gen_reg_rtx (GET_MODE (tem));
2098 emit_insn_before (gen_move_insn (tem1, tem), insn);
2099 emit_insn_after (gen_move_insn (tem, tem1), insn);
2100 XEXP (outerdest, 0) = tem1;
2101 return;
2102 }
2103 #endif
2104
2105 /* STRICT_LOW_PART is a no-op on memory references
2106 and it can cause combinations to be unrecognizable,
2107 so eliminate it. */
2108
2109 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2110 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2111
2112 /* A valid insn to copy VAR into or out of a register
2113 must be left alone, to avoid an infinite loop here.
2114 If the reference to VAR is by a subreg, fix that up,
2115 since SUBREG is not valid for a memref.
2116 Also fix up the address of the stack slot.
2117
2118 Note that we must not try to recognize the insn until
2119 after we know that we have valid addresses and no
2120 (subreg (mem ...) ...) constructs, since these interfere
2121 with determining the validity of the insn. */
2122
2123 if ((SET_SRC (x) == var
2124 || (GET_CODE (SET_SRC (x)) == SUBREG
2125 && SUBREG_REG (SET_SRC (x)) == var))
2126 && (GET_CODE (SET_DEST (x)) == REG
2127 || (GET_CODE (SET_DEST (x)) == SUBREG
2128 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2129 && GET_MODE (var) == promoted_mode
2130 && x == single_set (insn))
2131 {
2132 rtx pat;
2133
2134 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2135 if (replacement->new)
2136 SET_SRC (x) = replacement->new;
2137 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2138 SET_SRC (x) = replacement->new
2139 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2140 else
2141 SET_SRC (x) = replacement->new
2142 = fixup_stack_1 (SET_SRC (x), insn);
2143
2144 if (recog_memoized (insn) >= 0)
2145 return;
2146
2147 /* INSN is not valid, but we know that we want to
2148 copy SET_SRC (x) to SET_DEST (x) in some way. So
2149 we generate the move and see whether it requires more
2150 than one insn. If it does, we emit those insns and
2151 delete INSN. Otherwise, we can just replace the pattern
2152 of INSN; we have already verified above that INSN has
2153 no other function than to do X. */
2154
2155 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2156 if (GET_CODE (pat) == SEQUENCE)
2157 {
2158 emit_insn_after (pat, insn);
2159 PUT_CODE (insn, NOTE);
2160 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2161 NOTE_SOURCE_FILE (insn) = 0;
2162 }
2163 else
2164 PATTERN (insn) = pat;
2165
2166 return;
2167 }
2168
2169 if ((SET_DEST (x) == var
2170 || (GET_CODE (SET_DEST (x)) == SUBREG
2171 && SUBREG_REG (SET_DEST (x)) == var))
2172 && (GET_CODE (SET_SRC (x)) == REG
2173 || (GET_CODE (SET_SRC (x)) == SUBREG
2174 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2175 && GET_MODE (var) == promoted_mode
2176 && x == single_set (insn))
2177 {
2178 rtx pat;
2179
2180 if (GET_CODE (SET_DEST (x)) == SUBREG)
2181 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2182 else
2183 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2184
2185 if (recog_memoized (insn) >= 0)
2186 return;
2187
2188 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2189 if (GET_CODE (pat) == SEQUENCE)
2190 {
2191 emit_insn_after (pat, insn);
2192 PUT_CODE (insn, NOTE);
2193 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2194 NOTE_SOURCE_FILE (insn) = 0;
2195 }
2196 else
2197 PATTERN (insn) = pat;
2198
2199 return;
2200 }
2201
2202 /* Otherwise, storing into VAR must be handled specially
2203 by storing into a temporary and copying that into VAR
2204 with a new insn after this one. Note that this case
2205 will be used when storing into a promoted scalar since
2206 the insn will now have different modes on the input
2207 and output and hence will be invalid (except for the case
2208 of setting it to a constant, which does not need any
2209 change if it is valid). We generate extra code in that case,
2210 but combine.c will eliminate it. */
2211
2212 if (dest == var)
2213 {
2214 rtx temp;
2215 rtx fixeddest = SET_DEST (x);
2216
2217 /* STRICT_LOW_PART around a MEM can be discarded. */
2218 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2219 fixeddest = XEXP (fixeddest, 0);
2220 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2221 if (GET_CODE (fixeddest) == SUBREG)
2222 {
2223 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2224 promoted_mode = GET_MODE (fixeddest);
2225 }
2226 else
2227 fixeddest = fixup_stack_1 (fixeddest, insn);
2228
2229 temp = gen_reg_rtx (promoted_mode);
2230
2231 emit_insn_after (gen_move_insn (fixeddest,
2232 gen_lowpart (GET_MODE (fixeddest),
2233 temp)),
2234 insn);
2235
2236 SET_DEST (x) = temp;
2237 }
2238 }
2239
2240 default:
2241 break;
2242 }
2243
2244 /* Nothing special about this RTX; fix its operands. */
2245
2246 fmt = GET_RTX_FORMAT (code);
2247 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2248 {
2249 if (fmt[i] == 'e')
2250 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2251 if (fmt[i] == 'E')
2252 {
2253 register int j;
2254 for (j = 0; j < XVECLEN (x, i); j++)
2255 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2256 insn, replacements);
2257 }
2258 }
2259 }
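/* A minimal sketch of find_fixup_replacement, inferred from its uses
   above (the real definition lives elsewhere in this file and may
   differ): one shared entry per distinct rtx, created with ->new == 0,
   so that every occurrence of the same rtx within one insn receives
   the same replacement, as MATCH_DUP requires.  */
#if 0
struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

static struct fixup_replacement *
find_fixup_replacement_sketch (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* Reuse an existing entry for X if there is one.  */
  for (p = *replacements; p; p = p->next)
    if (p->old == x)
      return p;

  /* Otherwise push a fresh entry; the caller fills in ->new.
     (Allocation scheme assumed; the real code may use an obstack.)  */
  p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
  p->old = x;
  p->new = 0;
  p->next = *replacements;
  *replacements = p;
  return p;
}
#endif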
2260 \f
2261 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2262 return an rtx (MEM:m1 newaddr) which is equivalent.
2263 If any insns must be emitted to compute NEWADDR, put them before INSN.
2264
2265 UNCRITICAL nonzero means accept paradoxical subregs.
2266 This is used for subregs found inside REG_NOTES. */
2267
2268 static rtx
2269 fixup_memory_subreg (x, insn, uncritical)
2270 rtx x;
2271 rtx insn;
2272 int uncritical;
2273 {
2274 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2275 rtx addr = XEXP (SUBREG_REG (x), 0);
2276 enum machine_mode mode = GET_MODE (x);
2277 rtx result;
2278
2279 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2280 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2281 && ! uncritical)
2282 abort ();
2283
2284 if (BYTES_BIG_ENDIAN)
2285 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2286 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2287 addr = plus_constant (addr, offset);
2288 if (!flag_force_addr && memory_address_p (mode, addr))
2289 /* Shortcut if no insns need be emitted. */
2290 return change_address (SUBREG_REG (x), mode, addr);
2291 start_sequence ();
2292 result = change_address (SUBREG_REG (x), mode, addr);
2293 emit_insn_before (gen_sequence (), insn);
2294 end_sequence ();
2295 return result;
2296 }
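/* An illustrative sketch (not compiled) of the offset arithmetic
   above, with WORD_BYTES standing in for UNITS_PER_WORD and
   BIG_ENDIAN_P for BYTES_BIG_ENDIAN.  */
#if 0
#define MIN_SKETCH(A,B) ((A) < (B) ? (A) : (B))

static int
subreg_byte_offset_sketch (subreg_word, word_bytes,
                           inner_size, outer_size, big_endian_p)
     int subreg_word, word_bytes, inner_size, outer_size, big_endian_p;
{
  int offset = subreg_word * word_bytes;

  /* On big-endian targets the narrow value sits at the high end of
     the word, so shift the byte address accordingly.  */
  if (big_endian_p)
    offset += (MIN_SKETCH (word_bytes, inner_size)
               - MIN_SKETCH (word_bytes, outer_size));
  return offset;
}

/* E.g. (SUBREG:QI (MEM:SI addr) 0) with 4-byte words yields offset 0
   on a little-endian target but offset 3 on a big-endian one, where
   the low-order byte sits at the highest address of the word.  */
#endif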
2297
2298 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2299 Replace subexpressions of X in place.
2300 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2301 Otherwise return X, with its contents possibly altered.
2302
2303 If any insns must be emitted to compute NEWADDR, put them before INSN.
2304
2305 UNCRITICAL is as in fixup_memory_subreg. */
2306
2307 static rtx
2308 walk_fixup_memory_subreg (x, insn, uncritical)
2309 register rtx x;
2310 rtx insn;
2311 int uncritical;
2312 {
2313 register enum rtx_code code;
2314 register char *fmt;
2315 register int i;
2316
2317 if (x == 0)
2318 return 0;
2319
2320 code = GET_CODE (x);
2321
2322 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2323 return fixup_memory_subreg (x, insn, uncritical);
2324
2325 /* Nothing special about this RTX; fix its operands. */
2326
2327 fmt = GET_RTX_FORMAT (code);
2328 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2329 {
2330 if (fmt[i] == 'e')
2331 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2332 if (fmt[i] == 'E')
2333 {
2334 register int j;
2335 for (j = 0; j < XVECLEN (x, i); j++)
2336 XVECEXP (x, i, j)
2337 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2338 }
2339 }
2340 return x;
2341 }
2342 \f
2343 /* For each memory ref within X, if it refers to a stack slot
2344 with an out of range displacement, put the address in a temp register
2345 (emitting new insns before INSN to load these registers)
2346 and alter the memory ref to use that register.
2347 Replace each such MEM rtx with a copy, to avoid clobberage. */
2348
2349 static rtx
2350 fixup_stack_1 (x, insn)
2351 rtx x;
2352 rtx insn;
2353 {
2354 register int i;
2355 register RTX_CODE code = GET_CODE (x);
2356 register char *fmt;
2357
2358 if (code == MEM)
2359 {
2360 register rtx ad = XEXP (x, 0);
2361 /* If we have the address of a stack slot but it's not valid
2362 (the displacement is too large), compute the sum in a register. */
2363 if (GET_CODE (ad) == PLUS
2364 && GET_CODE (XEXP (ad, 0)) == REG
2365 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2366 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2367 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2368 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2369 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2370 #endif
2371 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2372 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2373 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2374 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2375 {
2376 rtx temp, seq;
2377 if (memory_address_p (GET_MODE (x), ad))
2378 return x;
2379
2380 start_sequence ();
2381 temp = copy_to_reg (ad);
2382 seq = gen_sequence ();
2383 end_sequence ();
2384 emit_insn_before (seq, insn);
2385 return change_address (x, VOIDmode, temp);
2386 }
2387 return x;
2388 }
2389
2390 fmt = GET_RTX_FORMAT (code);
2391 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2392 {
2393 if (fmt[i] == 'e')
2394 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2395 if (fmt[i] == 'E')
2396 {
2397 register int j;
2398 for (j = 0; j < XVECLEN (x, i); j++)
2399 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2400 }
2401 }
2402 return x;
2403 }
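/* A worked example of the transformation above, assuming a machine
   whose displacement field cannot hold 40000: if

       (mem:SI (plus:SI (reg fp) (const_int 40000)))

   is not a valid address, we emit

       (set (reg tmp) (plus:SI (reg fp) (const_int 40000)))

   before INSN and rewrite the reference as (mem:SI (reg tmp)),
   using a fresh copy of the MEM to avoid clobbering shared rtl.  */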
2404 \f
2405 /* Optimization: a bit-field instruction whose field
2406 happens to be a byte or halfword in memory
2407 can be changed to a move instruction.
2408
2409 We call here when INSN is an insn to examine or store into a bit-field.
2410 BODY is the SET-rtx to be altered.
2411
2412 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2413 (Currently this is called only from function.c, and EQUIV_MEM
2414 is always 0.) */
2415
2416 static void
2417 optimize_bit_field (body, insn, equiv_mem)
2418 rtx body;
2419 rtx insn;
2420 rtx *equiv_mem;
2421 {
2422 register rtx bitfield;
2423 int destflag;
2424 rtx seq = 0;
2425 enum machine_mode mode;
2426
2427 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2428 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2429 bitfield = SET_DEST (body), destflag = 1;
2430 else
2431 bitfield = SET_SRC (body), destflag = 0;
2432
2433 /* First check that the field being stored has constant size and position
2434 and is in fact a byte or halfword suitably aligned. */
2435
2436 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2437 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2438 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2439 != BLKmode)
2440 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2441 {
2442 register rtx memref = 0;
2443
2444 /* Now check that the containing word is memory, not a register,
2445 and that it is safe to change the machine mode. */
2446
2447 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2448 memref = XEXP (bitfield, 0);
2449 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2450 && equiv_mem != 0)
2451 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2452 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2453 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2454 memref = SUBREG_REG (XEXP (bitfield, 0));
2455 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2456 && equiv_mem != 0
2457 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2458 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2459
2460 if (memref
2461 && ! mode_dependent_address_p (XEXP (memref, 0))
2462 && ! MEM_VOLATILE_P (memref))
2463 {
2464 /* Now adjust the address, first for any subreg'ing
2465 that we are now getting rid of,
2466 and then for which byte of the word is wanted. */
2467
2468 register int offset = INTVAL (XEXP (bitfield, 2));
2469 rtx insns;
2470
2471 /* Adjust OFFSET to count bits from low-address byte. */
2472 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2473 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2474 - offset - INTVAL (XEXP (bitfield, 1)));
2475
2476 /* Adjust OFFSET to count bytes from low-address byte. */
2477 offset /= BITS_PER_UNIT;
2478 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2479 {
2480 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2481 if (BYTES_BIG_ENDIAN)
2482 offset -= (MIN (UNITS_PER_WORD,
2483 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2484 - MIN (UNITS_PER_WORD,
2485 GET_MODE_SIZE (GET_MODE (memref))));
2486 }
2487
2488 start_sequence ();
2489 memref = change_address (memref, mode,
2490 plus_constant (XEXP (memref, 0), offset));
2491 insns = get_insns ();
2492 end_sequence ();
2493 emit_insns_before (insns, insn);
2494
2495 /* Store this memory reference where
2496 we found the bit field reference. */
2497
2498 if (destflag)
2499 {
2500 validate_change (insn, &SET_DEST (body), memref, 1);
2501 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2502 {
2503 rtx src = SET_SRC (body);
2504 while (GET_CODE (src) == SUBREG
2505 && SUBREG_WORD (src) == 0)
2506 src = SUBREG_REG (src);
2507 if (GET_MODE (src) != GET_MODE (memref))
2508 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2509 validate_change (insn, &SET_SRC (body), src, 1);
2510 }
2511 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2512 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2513 /* This shouldn't happen because anything that didn't have
2514 one of these modes should have been converted explicitly
2515 and then referenced through a subreg.
2516 This is so because the original bit-field was
2517 handled by agg_mode and so its tree structure had
2518 the same mode that memref now has. */
2519 abort ();
2520 }
2521 else
2522 {
2523 rtx dest = SET_DEST (body);
2524
2525 while (GET_CODE (dest) == SUBREG
2526 && SUBREG_WORD (dest) == 0
2527 && (GET_MODE_CLASS (GET_MODE (dest))
2528 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2529 dest = SUBREG_REG (dest);
2530
2531 validate_change (insn, &SET_DEST (body), dest, 1);
2532
2533 if (GET_MODE (dest) == GET_MODE (memref))
2534 validate_change (insn, &SET_SRC (body), memref, 1);
2535 else
2536 {
2537 /* Convert the mem ref to the destination mode. */
2538 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2539
2540 start_sequence ();
2541 convert_move (newreg, memref,
2542 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2543 seq = get_insns ();
2544 end_sequence ();
2545
2546 validate_change (insn, &SET_SRC (body), newreg, 1);
2547 }
2548 }
2549
2550 /* See if we can convert this extraction or insertion into
2551 a simple move insn. We might not be able to do so if this
2552 was, for example, part of a PARALLEL.
2553
2554 If we succeed, write out any needed conversions. If we fail,
2555 it is hard to guess why we failed, so don't do anything
2556 special; just let the optimization be suppressed. */
2557
2558 if (apply_change_group () && seq)
2559 emit_insns_before (seq, insn);
2560 }
2561 }
2562 }
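/* A worked example of the optimization above, assuming a
   little-endian target: in

       (set (reg:SI r) (zero_extract:SI (mem:SI addr)
                                        (const_int 8) (const_int 8)))

   the field has constant size and position, mode_for_size (8, ...)
   gives QImode, and 8 % 8 == 0, so the extraction collapses to a
   byte load from (mem:QI (plus addr 1)); since the destination is
   SImode, a zero-extending convert_move through a fresh pseudo
   supplies the conversion.  */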
2563 \f
2564 /* These routines are responsible for converting virtual register references
2565 to the actual hard register references once RTL generation is complete.
2566
2567 The following four variables are used for communication between the
2568 routines. They contain the offsets of the virtual registers from their
2569 respective hard registers. */
2570
2571 static int in_arg_offset;
2572 static int var_offset;
2573 static int dynamic_offset;
2574 static int out_arg_offset;
2575
2576 /* In most machines, the stack pointer register is equivalent to the bottom
2577 of the stack. */
2578
2579 #ifndef STACK_POINTER_OFFSET
2580 #define STACK_POINTER_OFFSET 0
2581 #endif
2582
2583 /* If not defined, pick an appropriate default for the offset of dynamically
2584 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2585 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2586
2587 #ifndef STACK_DYNAMIC_OFFSET
2588
2589 #ifdef ACCUMULATE_OUTGOING_ARGS
2590 /* The bottom of the stack points to the actual arguments. If
2591 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2592 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2593 stack space for register parameters is not pushed by the caller, but
2594 is rather part of the fixed stack areas and hence not included in
2595 `current_function_outgoing_args_size'. Nevertheless, we must allow
2596 for it when allocating dynamic stack objects. */
2597
2598 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2599 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2600 (current_function_outgoing_args_size \
2601 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2602
2603 #else
2604 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2605 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2606 #endif
2607
2608 #else
2609 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2610 #endif
2611 #endif
2612
2613 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2614 its address taken. DECL is the decl for the object stored in the
2615 register, for later use if we do need to force REG into the stack.
2616 REG is overwritten by the MEM, as in put_reg_into_stack. */
2617
2618 rtx
2619 gen_mem_addressof (reg, decl)
2620 rtx reg;
2621 tree decl;
2622 {
2623 tree type = TREE_TYPE (decl);
2624
2625 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2626 SET_ADDRESSOF_DECL (r, decl);
2627
2628 XEXP (reg, 0) = r;
2629 PUT_CODE (reg, MEM);
2630 PUT_MODE (reg, DECL_MODE (decl));
2631 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2632 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2633
2634 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2635 return reg;
2636 }
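/* For illustration (register numbers invented): given a pseudo
   (reg:SI 101) as REG, the code above rewrites that very rtx in
   place to

       (mem:SI (addressof:Pmode (reg:SI 102) 101))

   where 102 is a fresh pseudo and the original register number 101
   is preserved inside the ADDRESSOF for put_addressof_into_stack.
   Overwriting the REG, instead of replacing it, means every rtx
   that shared it now sees the MEM.  */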
2637
2638 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2639
2640 void
2641 flush_addressof (decl)
2642 tree decl;
2643 {
2644 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2645 && DECL_RTL (decl) != 0
2646 && GET_CODE (DECL_RTL (decl)) == MEM
2647 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2648 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2649 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2650 }
2651
2652 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2653
2654 static void
2655 put_addressof_into_stack (r)
2656 rtx r;
2657 {
2658 tree decl = ADDRESSOF_DECL (r);
2659 rtx reg = XEXP (r, 0);
2660
2661 if (GET_CODE (reg) != REG)
2662 abort ();
2663
2664 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2665 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2666 ADDRESSOF_REGNO (r));
2667 }
2668
2669 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2670 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2671 the stack. */
2672
2673 static void
2674 purge_addressof_1 (loc, insn, force)
2675 rtx *loc;
2676 rtx insn;
2677 int force;
2678 {
2679 rtx x;
2680 RTX_CODE code;
2681 int i, j;
2682 char *fmt;
2683
2684 /* Re-start here to avoid recursion in common cases. */
2685 restart:
2686
2687 x = *loc;
2688 if (x == 0)
2689 return;
2690
2691 code = GET_CODE (x);
2692
2693 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2694 {
2695 rtx insns;
2696 /* We must create a copy of the rtx because it was created by
2697 overwriting a REG rtx which is always shared. */
2698 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2699
2700 if (validate_change (insn, loc, sub, 0))
2701 return;
2702
2703 start_sequence ();
2704 if (! validate_change (insn, loc,
2705 force_operand (sub, NULL_RTX),
2706 0))
2707 abort ();
2708
2709 insns = get_insns ();
2710 end_sequence ();
2711 emit_insns_before (insns, insn);
2712 return;
2713 }
2714 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2715 {
2716 rtx sub = XEXP (XEXP (x, 0), 0);
2717 if (GET_CODE (sub) == MEM)
2718 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2719 if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2720 {
2721 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2722 {
2723 rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2724 if (validate_change (insn, loc, sub2, 0))
2725 goto restart;
2726 }
2727 }
2728 else if (validate_change (insn, loc, sub, 0))
2729 goto restart;
2730 /* else give up and put it into the stack */
2731 }
2732 else if (code == ADDRESSOF)
2733 {
2734 put_addressof_into_stack (x);
2735 return;
2736 }
2737
2738 /* Scan all subexpressions. */
2739 fmt = GET_RTX_FORMAT (code);
2740 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2741 {
2742 if (*fmt == 'e')
2743 purge_addressof_1 (&XEXP (x, i), insn, force);
2744 else if (*fmt == 'E')
2745 for (j = 0; j < XVECLEN (x, i); j++)
2746 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2747 }
2748 }
2749
2750 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2751 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2752 stack. */
2753
2754 void
2755 purge_addressof (insns)
2756 rtx insns;
2757 {
2758 rtx insn;
2759 for (insn = insns; insn; insn = NEXT_INSN (insn))
2760 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2761 || GET_CODE (insn) == CALL_INSN)
2762 {
2763 purge_addressof_1 (&PATTERN (insn), insn,
2764 asm_noperands (PATTERN (insn)) > 0);
2765 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2766 }
2767 }
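/* For illustration, the shapes handled by purge_addressof_1:

     (addressof (mem ADDR) N)  -> ADDR itself (copied, and forced into
                                  an operand when not directly valid);
     (mem (addressof SUB N))   -> SUB, or a SUBREG of SUB when the
                                  modes differ (little-endian only, as
                                  written); a MEM inside gets a fresh
                                  MEM in the outer mode;
     any remaining (addressof) -> its pseudo is sent to the stack by
                                  put_addressof_into_stack.  */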
2768 \f
2769 /* Pass through the INSNS of function FNDECL and convert virtual register
2770 references to hard register references. */
2771
2772 void
2773 instantiate_virtual_regs (fndecl, insns)
2774 tree fndecl;
2775 rtx insns;
2776 {
2777 rtx insn;
2778 int i;
2779
2780 /* Compute the offsets to use for this function. */
2781 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2782 var_offset = STARTING_FRAME_OFFSET;
2783 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2784 out_arg_offset = STACK_POINTER_OFFSET;
2785
2786 /* Scan all variables and parameters of this function. For each that is
2787 in memory, instantiate all virtual registers if the result is a valid
2788 address. If not, we do it later. That will handle most uses of virtual
2789 regs on many machines. */
2790 instantiate_decls (fndecl, 1);
2791
2792 /* Initialize recognition, indicating that volatile is OK. */
2793 init_recog ();
2794
2795 /* Scan through all the insns, instantiating every virtual register still
2796 present. */
2797 for (insn = insns; insn; insn = NEXT_INSN (insn))
2798 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2799 || GET_CODE (insn) == CALL_INSN)
2800 {
2801 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2802 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2803 }
2804
2805 /* Instantiate the stack slots for the parm registers, for later use in
2806 addressof elimination. */
2807 for (i = 0; i < max_parm_reg; ++i)
2808 if (parm_reg_stack_loc[i])
2809 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2810
2811 /* Now instantiate the remaining register equivalences for debugging info.
2812 These will not be valid addresses. */
2813 instantiate_decls (fndecl, 0);
2814
2815 /* Indicate that, from now on, assign_stack_local should use
2816 frame_pointer_rtx. */
2817 virtuals_instantiated = 1;
2818 }
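/* An illustrative sketch (not compiled): the virtual-to-hard mapping
   applied by instantiate_virtual_regs_1 below, gathered into one
   table.  Each virtual register becomes the given hard register plus
   the offset computed at the top of instantiate_virtual_regs.  */
#if 0
static struct
{
  rtx *virtual_reg;
  rtx *replacement;
  int *offset;
} virtual_reg_map[] =
{
  { &virtual_incoming_args_rtx, &arg_pointer_rtx,   &in_arg_offset },
  { &virtual_stack_vars_rtx,    &frame_pointer_rtx, &var_offset },
  { &virtual_stack_dynamic_rtx, &stack_pointer_rtx, &dynamic_offset },
  { &virtual_outgoing_args_rtx, &stack_pointer_rtx, &out_arg_offset },
};
#endif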
2819
2820 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2821 all virtual registers in their DECL_RTL's.
2822
2823 If VALID_ONLY, do this only if the resulting address is still valid.
2824 Otherwise, always do it. */
2825
2826 static void
2827 instantiate_decls (fndecl, valid_only)
2828 tree fndecl;
2829 int valid_only;
2830 {
2831 tree decl;
2832
2833 if (DECL_SAVED_INSNS (fndecl))
2834 /* When compiling an inline function, the obstack used for
2835 rtl allocation is the maybepermanent_obstack. Calling
2836 `resume_temporary_allocation' switches us back to that
2837 obstack while we process this function's parameters. */
2838 resume_temporary_allocation ();
2839
2840 /* Process all parameters of the function. */
2841 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2842 {
2843 int size = int_size_in_bytes (TREE_TYPE (decl));
2844 instantiate_decl (DECL_RTL (decl), size, valid_only);
2845
2846 /* If the parameter was promoted, then the mode of the incoming RTL
2847 may be wider than the declared type. We must use the larger of
2848 the two sizes. */
2849 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2850 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2851 }
2852
2853 /* Now process all variables defined in the function or its subblocks. */
2854 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2855
2856 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2857 {
2858 /* Save all rtl allocated for this function by raising the
2859 high-water mark on the maybepermanent_obstack. */
2860 preserve_data ();
2861 /* All further rtl allocation is now done in the current_obstack. */
2862 rtl_in_current_obstack ();
2863 }
2864 }
2865
2866 /* Subroutine of instantiate_decls: Process all decls in the given
2867 BLOCK node and all its subblocks. */
2868
2869 static void
2870 instantiate_decls_1 (let, valid_only)
2871 tree let;
2872 int valid_only;
2873 {
2874 tree t;
2875
2876 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2877 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2878 valid_only);
2879
2880 /* Process all subblocks. */
2881 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2882 instantiate_decls_1 (t, valid_only);
2883 }
2884
2885 /* Subroutine of the preceding procedures: Given RTL representing a
2886 decl and the size of the object, do any instantiation required.
2887
2888 If VALID_ONLY is non-zero, it means that the RTL should only be
2889 changed if the new address is valid. */
2890
2891 static void
2892 instantiate_decl (x, size, valid_only)
2893 rtx x;
2894 int size;
2895 int valid_only;
2896 {
2897 enum machine_mode mode;
2898 rtx addr;
2899
2900 /* If this is not a MEM, no need to do anything. Similarly if the
2901 address is a constant or a register that is not a virtual register. */
2902
2903 if (x == 0 || GET_CODE (x) != MEM)
2904 return;
2905
2906 addr = XEXP (x, 0);
2907 if (CONSTANT_P (addr)
2908 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
2909 || (GET_CODE (addr) == REG
2910 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2911 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2912 return;
2913
2914 /* If we should only do this if the address is valid, copy the address.
2915 We need to do this so we can undo any changes that might make the
2916 address invalid. This copy is unfortunate, but probably can't be
2917 avoided. */
2918
2919 if (valid_only)
2920 addr = copy_rtx (addr);
2921
2922 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2923
2924 if (valid_only)
2925 {
2926 /* Now verify that the resulting address is valid for every integer or
2927 floating-point mode up to and including SIZE bytes long. We do this
2928 since the object might be accessed in any mode and frame addresses
2929 are shared. */
2930
2931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2932 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2933 mode = GET_MODE_WIDER_MODE (mode))
2934 if (! memory_address_p (mode, addr))
2935 return;
2936
2937 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2938 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2939 mode = GET_MODE_WIDER_MODE (mode))
2940 if (! memory_address_p (mode, addr))
2941 return;
2942 }
2943
2944 /* Put back the address now that we have updated it and we either know
2945 it is valid or we don't care whether it is valid. */
2946
2947 XEXP (x, 0) = addr;
2948 }
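/* An example of the validity check above: for an 8-byte object the
   loops require the updated address to be valid in QImode, HImode,
   SImode and DImode, and likewise in SFmode and DFmode, before
   putting it back; frame addresses are shared, so the slot may be
   accessed in any of those modes.  */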
2949 \f
2950 /* Given a pointer to a piece of rtx and an optional pointer to the
2951 containing object, instantiate any virtual registers present in it.
2952
2953 If EXTRA_INSNS, we always do the replacement and generate
2954 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2955 is not valid.
2956
2957 Return 1 if we either had nothing to do or if we were able to do the
2958 needed replacement. Return 0 otherwise; we only return zero if
2959 EXTRA_INSNS is zero.
2960
2961 We first try some simple transformations to avoid the creation of extra
2962 pseudos. */
2963
2964 static int
2965 instantiate_virtual_regs_1 (loc, object, extra_insns)
2966 rtx *loc;
2967 rtx object;
2968 int extra_insns;
2969 {
2970 rtx x;
2971 RTX_CODE code;
2972 rtx new = 0;
2973 int offset;
2974 rtx temp;
2975 rtx seq;
2976 int i, j;
2977 char *fmt;
2978
2979 /* Re-start here to avoid recursion in common cases. */
2980 restart:
2981
2982 x = *loc;
2983 if (x == 0)
2984 return 1;
2985
2986 code = GET_CODE (x);
2987
2988 /* Check for some special cases. */
2989 switch (code)
2990 {
2991 case CONST_INT:
2992 case CONST_DOUBLE:
2993 case CONST:
2994 case SYMBOL_REF:
2995 case CODE_LABEL:
2996 case PC:
2997 case CC0:
2998 case ASM_INPUT:
2999 case ADDR_VEC:
3000 case ADDR_DIFF_VEC:
3001 case RETURN:
3002 return 1;
3003
3004 case SET:
3005 /* We are allowed to set the virtual registers. This means
3006 that the actual register should receive the source minus the
3007 appropriate offset. This is used, for example, in the handling
3008 of non-local gotos. */
3009 if (SET_DEST (x) == virtual_incoming_args_rtx)
3010 new = arg_pointer_rtx, offset = - in_arg_offset;
3011 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3012 new = frame_pointer_rtx, offset = - var_offset;
3013 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3014 new = stack_pointer_rtx, offset = - dynamic_offset;
3015 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3016 new = stack_pointer_rtx, offset = - out_arg_offset;
3017
3018 if (new)
3019 {
3020 /* The only valid sources here are PLUS or REG. Just do
3021 the simplest possible thing to handle them. */
3022 if (GET_CODE (SET_SRC (x)) != REG
3023 && GET_CODE (SET_SRC (x)) != PLUS)
3024 abort ();
3025
3026 start_sequence ();
3027 if (GET_CODE (SET_SRC (x)) != REG)
3028 temp = force_operand (SET_SRC (x), NULL_RTX);
3029 else
3030 temp = SET_SRC (x);
3031 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3032 seq = get_insns ();
3033 end_sequence ();
3034
3035 emit_insns_before (seq, object);
3036 SET_DEST (x) = new;
3037
3038 if (! validate_change (object, &SET_SRC (x), temp, 0)
3039 || ! extra_insns)
3040 abort ();
3041
3042 return 1;
3043 }
3044
3045 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3046 loc = &SET_SRC (x);
3047 goto restart;
3048
3049 case PLUS:
3050 /* Handle special case of virtual register plus constant. */
3051 if (CONSTANT_P (XEXP (x, 1)))
3052 {
3053 rtx old, new_offset;
3054
3055 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3056 if (GET_CODE (XEXP (x, 0)) == PLUS)
3057 {
3058 rtx inner = XEXP (XEXP (x, 0), 0);
3059
3060 if (inner == virtual_incoming_args_rtx)
3061 new = arg_pointer_rtx, offset = in_arg_offset;
3062 else if (inner == virtual_stack_vars_rtx)
3063 new = frame_pointer_rtx, offset = var_offset;
3064 else if (inner == virtual_stack_dynamic_rtx)
3065 new = stack_pointer_rtx, offset = dynamic_offset;
3066 else if (inner == virtual_outgoing_args_rtx)
3067 new = stack_pointer_rtx, offset = out_arg_offset;
3068 else
3069 {
3070 loc = &XEXP (x, 0);
3071 goto restart;
3072 }
3073
3074 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3075 extra_insns);
3076 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3077 }
3078
3079 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3080 new = arg_pointer_rtx, offset = in_arg_offset;
3081 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3082 new = frame_pointer_rtx, offset = var_offset;
3083 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3084 new = stack_pointer_rtx, offset = dynamic_offset;
3085 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3086 new = stack_pointer_rtx, offset = out_arg_offset;
3087 else
3088 {
3089 /* We know the second operand is a constant. Unless the
3090 first operand is a REG (which has already been checked),
3091 it needs to be checked. */
3092 if (GET_CODE (XEXP (x, 0)) != REG)
3093 {
3094 loc = &XEXP (x, 0);
3095 goto restart;
3096 }
3097 return 1;
3098 }
3099
3100 new_offset = plus_constant (XEXP (x, 1), offset);
3101
3102 /* If the new constant is zero, try to replace the sum with just
3103 the register. */
3104 if (new_offset == const0_rtx
3105 && validate_change (object, loc, new, 0))
3106 return 1;
3107
3108 /* Next try to replace the register and new offset.
3109 There are two changes to validate here and we can't assume that,
3110 when the old offset equals the new one, just changing the register
3111 will yield a valid insn. In the interests of a little efficiency,
3112 however, we only call validate_change once (we don't queue up the
3113 changes and then call apply_change_group). */
3114
3115 old = XEXP (x, 0);
3116 if (offset == 0
3117 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3118 : (XEXP (x, 0) = new,
3119 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3120 {
3121 if (! extra_insns)
3122 {
3123 XEXP (x, 0) = old;
3124 return 0;
3125 }
3126
3127 /* Otherwise copy the new constant into a register and replace
3128 the constant with that register. */
3129 temp = gen_reg_rtx (Pmode);
3130 XEXP (x, 0) = new;
3131 if (validate_change (object, &XEXP (x, 1), temp, 0))
3132 emit_insn_before (gen_move_insn (temp, new_offset), object);
3133 else
3134 {
3135 /* If that didn't work, replace this expression with a
3136 register containing the sum. */
3137
3138 XEXP (x, 0) = old;
3139 new = gen_rtx_PLUS (Pmode, new, new_offset);
3140
3141 start_sequence ();
3142 temp = force_operand (new, NULL_RTX);
3143 seq = get_insns ();
3144 end_sequence ();
3145
3146 emit_insns_before (seq, object);
3147 if (! validate_change (object, loc, temp, 0)
3148 && ! validate_replace_rtx (x, temp, object))
3149 abort ();
3150 }
3151 }
3152
3153 return 1;
3154 }
3155
3156 /* Fall through to generic two-operand expression case. */
3157 case EXPR_LIST:
3158 case CALL:
3159 case COMPARE:
3160 case MINUS:
3161 case MULT:
3162 case DIV: case UDIV:
3163 case MOD: case UMOD:
3164 case AND: case IOR: case XOR:
3165 case ROTATERT: case ROTATE:
3166 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3167 case NE: case EQ:
3168 case GE: case GT: case GEU: case GTU:
3169 case LE: case LT: case LEU: case LTU:
3170 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3171 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3172 loc = &XEXP (x, 0);
3173 goto restart;
3174
3175 case MEM:
3176 /* Most cases of MEM that convert to valid addresses have already been
3177 handled by our scan of decls. The only special handling we
3178 need here is to make a copy of the rtx to ensure it isn't being
3179 shared if we have to change it to a pseudo.
3180
3181 If the rtx is a simple reference to an address via a virtual register,
3182 it can potentially be shared. In such cases, first try to make it
3183 a valid address, which can also be shared. Otherwise, copy it and
3184 proceed normally.
3185
3186 First check for common cases that need no processing. These are
3187 usually due to instantiation already being done on a previous instance
3188 of a shared rtx. */
3189
3190 temp = XEXP (x, 0);
3191 if (CONSTANT_ADDRESS_P (temp)
3192 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3193 || temp == arg_pointer_rtx
3194 #endif
3195 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3196 || temp == hard_frame_pointer_rtx
3197 #endif
3198 || temp == frame_pointer_rtx)
3199 return 1;
3200
3201 if (GET_CODE (temp) == PLUS
3202 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3203 && (XEXP (temp, 0) == frame_pointer_rtx
3204 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3205 || XEXP (temp, 0) == hard_frame_pointer_rtx
3206 #endif
3207 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3208 || XEXP (temp, 0) == arg_pointer_rtx
3209 #endif
3210 ))
3211 return 1;
3212
3213 if (temp == virtual_stack_vars_rtx
3214 || temp == virtual_incoming_args_rtx
3215 || (GET_CODE (temp) == PLUS
3216 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3217 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3218 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3219 {
3220 /* This MEM may be shared. If the substitution can be done without
3221 the need to generate new pseudos, we want to do it in place
3222 so all copies of the shared rtx benefit. The call below will
3223 only make substitutions if the resulting address is still
3224 valid.
3225
3226 Note that we cannot pass X as the object in the recursive call
3227 since the insn being processed may not allow all valid
3228 addresses. However, if we were not passed an object, we can
3229 only modify X without copying it if X will have a valid
3230 address.
3231
3232 ??? Also note that this can still lose if OBJECT is an insn that
3233 has fewer restrictions on an address than some other insn.
3234 In that case, we will modify the shared address. This case
3235 doesn't seem very likely, though. One case where this could
3236 happen is in the case of a USE or CLOBBER reference, but we
3237 take care of that below. */
3238
3239 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3240 object ? object : x, 0))
3241 return 1;
3242
3243 /* Otherwise make a copy and process that copy. We copy the entire
3244 RTL expression since it might be a PLUS which could also be
3245 shared. */
3246 *loc = x = copy_rtx (x);
3247 }
3248
3249 /* Fall through to generic unary operation case. */
3250 case SUBREG:
3251 case STRICT_LOW_PART:
3252 case NEG: case NOT:
3253 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3254 case SIGN_EXTEND: case ZERO_EXTEND:
3255 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3256 case FLOAT: case FIX:
3257 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3258 case ABS:
3259 case SQRT:
3260 case FFS:
3261 /* These cases either have just one operand or we know that we need not
3262 check the rest of the operands. */
3263 loc = &XEXP (x, 0);
3264 goto restart;
3265
3266 case USE:
3267 case CLOBBER:
3268 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3269 go ahead and make the invalid one, but do it to a copy. For a REG,
3270 just make the recursive call, since there's no chance of a problem. */
3271
3272 if ((GET_CODE (XEXP (x, 0)) == MEM
3273 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3274 0))
3275 || (GET_CODE (XEXP (x, 0)) == REG
3276 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3277 return 1;
3278
3279 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3280 loc = &XEXP (x, 0);
3281 goto restart;
3282
3283 case REG:
3284 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3285 in front of this insn and substitute the temporary. */
3286 if (x == virtual_incoming_args_rtx)
3287 new = arg_pointer_rtx, offset = in_arg_offset;
3288 else if (x == virtual_stack_vars_rtx)
3289 new = frame_pointer_rtx, offset = var_offset;
3290 else if (x == virtual_stack_dynamic_rtx)
3291 new = stack_pointer_rtx, offset = dynamic_offset;
3292 else if (x == virtual_outgoing_args_rtx)
3293 new = stack_pointer_rtx, offset = out_arg_offset;
3294
3295 if (new)
3296 {
3297 temp = plus_constant (new, offset);
3298 if (!validate_change (object, loc, temp, 0))
3299 {
3300 if (! extra_insns)
3301 return 0;
3302
3303 start_sequence ();
3304 temp = force_operand (temp, NULL_RTX);
3305 seq = get_insns ();
3306 end_sequence ();
3307
3308 emit_insns_before (seq, object);
3309 if (! validate_change (object, loc, temp, 0)
3310 && ! validate_replace_rtx (x, temp, object))
3311 abort ();
3312 }
3313 }
3314
3315 return 1;
3316
3317 case ADDRESSOF:
3318 if (GET_CODE (XEXP (x, 0)) == REG)
3319 return 1;
3320
3321 else if (GET_CODE (XEXP (x, 0)) == MEM)
3322 {
3323 /* If we have a (addressof (mem ..)), do any instantiation inside
3324 since we know we'll be making the inside valid when we finally
3325 remove the ADDRESSOF. */
3326 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3327 return 1;
3328 }
3329 break;
3330
3331 default:
3332 break;
3333 }
3334
3335 /* Scan all subexpressions. */
3336 fmt = GET_RTX_FORMAT (code);
3337 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3338 if (*fmt == 'e')
3339 {
3340 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3341 return 0;
3342 }
3343 else if (*fmt == 'E')
3344 for (j = 0; j < XVECLEN (x, i); j++)
3345 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3346 extra_insns))
3347 return 0;
3348
3349 return 1;
3350 }
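/* A worked example for the PLUS case above, assuming var_offset == 16:

       (plus:SI (reg virtual-stack-vars) (const_int 8))

   becomes (plus:SI (reg fp) (const_int 24)).  Had the combined
   constant come to zero, the whole sum would instead have been
   replaced by the bare frame-pointer register.  */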
3351 \f
3352 /* Optimization: assuming this function does not receive nonlocal gotos,
3353 delete the handlers for such, as well as the insns to establish
3354 and disestablish them. */
3355
3356 static void
3357 delete_handlers ()
3358 {
3359 rtx insn;
3360 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3361 {
3362 /* Delete the handler by turning off the flag that would
3363 prevent jump_optimize from deleting it.
3364 Also permit deletion of the nonlocal labels themselves
3365 if nothing local refers to them. */
3366 if (GET_CODE (insn) == CODE_LABEL)
3367 {
3368 tree t, last_t;
3369
3370 LABEL_PRESERVE_P (insn) = 0;
3371
3372 /* Remove it from the nonlocal_label list, to avoid confusing
3373 flow. */
3374 for (t = nonlocal_labels, last_t = 0; t;
3375 last_t = t, t = TREE_CHAIN (t))
3376 if (DECL_RTL (TREE_VALUE (t)) == insn)
3377 break;
3378 if (t)
3379 {
3380 if (! last_t)
3381 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3382 else
3383 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3384 }
3385 }
3386 if (GET_CODE (insn) == INSN
3387 && ((nonlocal_goto_handler_slot != 0
3388 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3389 || (nonlocal_goto_stack_level != 0
3390 && reg_mentioned_p (nonlocal_goto_stack_level,
3391 PATTERN (insn)))))
3392 delete_insn (insn);
3393 }
3394 }
3395
3396 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3397 of the current function. */
3398
3399 rtx
3400 nonlocal_label_rtx_list ()
3401 {
3402 tree t;
3403 rtx x = 0;
3404
3405 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3406 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3407
3408 return x;
3409 }
3410 \f
3411 /* Output a USE for any register use in RTL.
3412 This is used with -noreg to mark the extent of the lifespan
3413 of any registers used in a user-visible variable's DECL_RTL.
3414
3415 void
3416 use_variable (rtl)
3417 rtx rtl;
3418 {
3419 if (GET_CODE (rtl) == REG)
3420 /* This is a register variable. */
3421 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3422 else if (GET_CODE (rtl) == MEM
3423 && GET_CODE (XEXP (rtl, 0)) == REG
3424 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3425 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3426 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3427 /* This is a variable-sized structure. */
3428 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3429 }
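/* For example: for a variable living in pseudo 42 this emits (use (reg 42));
   for a variable-sized structure whose DECL_RTL is (mem (reg 42)), it emits
   a USE of the address register instead, provided that register is not a
   virtual register and not the internal arg pointer.  */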
3430
3431 /* Like use_variable except that it outputs the USEs after INSN
3432 instead of at the end of the insn-chain. */
3433
3434 void
3435 use_variable_after (rtl, insn)
3436 rtx rtl, insn;
3437 {
3438 if (GET_CODE (rtl) == REG)
3439 /* This is a register variable. */
3440 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3441 else if (GET_CODE (rtl) == MEM
3442 && GET_CODE (XEXP (rtl, 0)) == REG
3443 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3444 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3445 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3446 /* This is a variable-sized structure. */
3447 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3448 }
3449 \f
3450 int
3451 max_parm_reg_num ()
3452 {
3453 return max_parm_reg;
3454 }
3455
3456 /* Return the first insn following those generated by `assign_parms'. */
3457
3458 rtx
3459 get_first_nonparm_insn ()
3460 {
3461 if (last_parm_insn)
3462 return NEXT_INSN (last_parm_insn);
3463 return get_insns ();
3464 }
3465
3466 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3467 Crash if there is none. */
3468
3469 rtx
3470 get_first_block_beg ()
3471 {
3472 register rtx searcher;
3473 register rtx insn = get_first_nonparm_insn ();
3474
3475 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3476 if (GET_CODE (searcher) == NOTE
3477 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3478 return searcher;
3479
3480 abort (); /* Invalid call to this function. (See comments above.) */
3481 return NULL_RTX;
3482 }
3483
3484 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3485 This means a type for which function calls must pass an address to the
3486 function or get an address back from the function.
3487 EXP may be a type node or an expression (whose type is tested). */
3488
3489 int
3490 aggregate_value_p (exp)
3491 tree exp;
3492 {
3493 int i, regno, nregs;
3494 rtx reg;
3495 tree type;
3496 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3497 type = exp;
3498 else
3499 type = TREE_TYPE (exp);
3500
3501 if (RETURN_IN_MEMORY (type))
3502 return 1;
3503 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3504 and thus can't be returned in registers. */
3505 if (TREE_ADDRESSABLE (type))
3506 return 1;
3507 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3508 return 1;
3509 /* Make sure we have suitable call-clobbered regs to return
3510 the value in; if not, we must return it in memory. */
3511 reg = hard_function_value (type, 0);
3512
3513 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3514 it is OK. */
3515 if (GET_CODE (reg) != REG)
3516 return 0;
3517
3518 regno = REGNO (reg);
3519 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3520 for (i = 0; i < nregs; i++)
3521 if (! call_used_regs[regno + i])
3522 return 1;
3523 return 0;
3524 }
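/* For example: with -fpcc-struct-return, any struct or union type is
   returned in memory, so aggregate_value_p returns 1 for it; a plain int
   whose return register is call-clobbered falls through to the final loop
   and returns 0.  The exact outcome is target-dependent, since
   RETURN_IN_MEMORY, hard_function_value and call_used_regs all come from
   the machine description.  */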
3525 \f
3526 /* Assign RTL expressions to the function's parameters.
3527 This may involve copying them into registers and using
3528 those registers as the RTL for them.
3529
3530 If SECOND_TIME is non-zero it means that this function is being
3531 called a second time. This is done by integrate.c when a function's
3532 compilation is deferred. We need to come back here in case the
3533 FUNCTION_ARG macro computes items needed for the rest of the compilation
3534 (such as changing which registers are fixed or caller-saved). But suppress
3535 writing any insns or setting DECL_RTL of anything in this case. */
3536
3537 void
3538 assign_parms (fndecl, second_time)
3539 tree fndecl;
3540 int second_time;
3541 {
3542 register tree parm;
3543 register rtx entry_parm = 0;
3544 register rtx stack_parm = 0;
3545 CUMULATIVE_ARGS args_so_far;
3546 enum machine_mode promoted_mode, passed_mode;
3547 enum machine_mode nominal_mode, promoted_nominal_mode;
3548 int unsignedp;
3549 /* Total space needed so far for args on the stack,
3550 given as a constant and a tree-expression. */
3551 struct args_size stack_args_size;
3552 tree fntype = TREE_TYPE (fndecl);
3553 tree fnargs = DECL_ARGUMENTS (fndecl);
3554 /* This is used for the arg pointer when referring to stack args. */
3555 rtx internal_arg_pointer;
3556 /* This is a dummy PARM_DECL that we used for the function result if
3557 the function returns a structure. */
3558 tree function_result_decl = 0;
3559 int varargs_setup = 0;
3560 rtx conversion_insns = 0;
3561
3562 /* Nonzero if the last arg is named `__builtin_va_alist',
3563 which is used on some machines for old-fashioned non-ANSI varargs.h;
3564 this should be stuck onto the stack as if it had arrived there. */
3565 int hide_last_arg
3566 = (current_function_varargs
3567 && fnargs
3568 && (parm = tree_last (fnargs)) != 0
3569 && DECL_NAME (parm)
3570 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3571 "__builtin_va_alist")));
3572
3573 /* Nonzero if function takes extra anonymous args.
3574 This means the last named arg must be on the stack
3575 right before the anonymous ones. */
3576 int stdarg
3577 = (TYPE_ARG_TYPES (fntype) != 0
3578 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3579 != void_type_node));
3580
3581 current_function_stdarg = stdarg;
3582
3583 /* If the reg that the virtual arg pointer will be translated into is
3584 not a fixed reg or is the stack pointer, make a copy of the virtual
3585 arg pointer, and address parms via the copy. The frame pointer is
3586 considered fixed even though it is not marked as such.
3587
3588 The second time through, simply use ap to avoid generating rtx. */
3589
3590 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3591 || ! (fixed_regs[ARG_POINTER_REGNUM]
3592 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3593 && ! second_time)
3594 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3595 else
3596 internal_arg_pointer = virtual_incoming_args_rtx;
3597 current_function_internal_arg_pointer = internal_arg_pointer;
3598
3599 stack_args_size.constant = 0;
3600 stack_args_size.var = 0;
3601
3602 /* If struct value address is treated as the first argument, make it so. */
3603 if (aggregate_value_p (DECL_RESULT (fndecl))
3604 && ! current_function_returns_pcc_struct
3605 && struct_value_incoming_rtx == 0)
3606 {
3607 tree type = build_pointer_type (TREE_TYPE (fntype));
3608
3609 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3610
3611 DECL_ARG_TYPE (function_result_decl) = type;
3612 TREE_CHAIN (function_result_decl) = fnargs;
3613 fnargs = function_result_decl;
3614 }
3615
3616 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3617 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3618 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3619
3620 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3621 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3622 #else
3623 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3624 #endif
3625
3626 /* We haven't yet found an argument that we must push and pretend the
3627 caller did. */
3628 current_function_pretend_args_size = 0;
3629
3630 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3631 {
3632 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3633 struct args_size stack_offset;
3634 struct args_size arg_size;
3635 int passed_pointer = 0;
3636 int did_conversion = 0;
3637 tree passed_type = DECL_ARG_TYPE (parm);
3638 tree nominal_type = TREE_TYPE (parm);
3639
3640 /* Set LAST_NAMED if this is last named arg before some
3641 anonymous args. */
3642 int last_named = ((TREE_CHAIN (parm) == 0
3643 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3644 && (stdarg || current_function_varargs));
3645 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3646 most machines, if this is a varargs/stdarg function, then we treat
3647 the last named arg as if it were anonymous too. */
3648 #ifdef STRICT_ARGUMENT_NAMING
3649 int named_arg = 1;
3650 #else
3651 int named_arg = ! last_named;
3652 #endif
3653 /* If this is a varargs function, then we want to treat the last named
3654          argument as if it were an aggregate, because it might be accessed as
3655 one by the va_arg macros. This is necessary to make the aliasing
3656 code handle this parm correctly. */
3657 if (hide_last_arg && last_named)
3658 aggregate = 1;
3659
3660 if (TREE_TYPE (parm) == error_mark_node
3661 /* This can happen after weird syntax errors
3662 or if an enum type is defined among the parms. */
3663 || TREE_CODE (parm) != PARM_DECL
3664 || passed_type == NULL)
3665 {
3666 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3667 = gen_rtx_MEM (BLKmode, const0_rtx);
3668 TREE_USED (parm) = 1;
3669 continue;
3670 }
3671
3672       /* For a varargs.h function, save info about regs and stack space
3673 used by the individual args, not including the va_alist arg. */
3674 if (hide_last_arg && last_named)
3675 current_function_args_info = args_so_far;
3676
3677 /* Find mode of arg as it is passed, and mode of arg
3678 as it should be during execution of this function. */
3679 passed_mode = TYPE_MODE (passed_type);
3680 nominal_mode = TYPE_MODE (nominal_type);
3681
3682       /* If the parm's mode is VOIDmode, its value doesn't matter,
3683          so avoid the usual things like emit_move_insn that could crash.  */
3684 if (nominal_mode == VOIDmode)
3685 {
3686 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3687 continue;
3688 }
3689
3690 /* If the parm is to be passed as a transparent union, use the
3691 type of the first field for the tests below. We have already
3692 verified that the modes are the same. */
3693 if (DECL_TRANSPARENT_UNION (parm)
3694 || TYPE_TRANSPARENT_UNION (passed_type))
3695 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3696
3697       /* See if this arg was passed by invisible reference.  That is so
3698          if it is an object whose size depends on the contents of the
3699 object itself or if the machine requires these objects be passed
3700 that way. */
3701
3702 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3703 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3704 || TREE_ADDRESSABLE (passed_type)
3705 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3706 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3707 passed_type, named_arg)
3708 #endif
3709 )
3710 {
3711 passed_type = nominal_type = build_pointer_type (passed_type);
3712 passed_pointer = 1;
3713 passed_mode = nominal_mode = Pmode;
3714 }
3715
3716 promoted_mode = passed_mode;
3717
3718 #ifdef PROMOTE_FUNCTION_ARGS
3719       /* Compute the mode to which the arg is actually extended.  */
3720 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3721 #endif
3722
3723 /* Let machine desc say which reg (if any) the parm arrives in.
3724 0 means it arrives on the stack. */
3725 #ifdef FUNCTION_INCOMING_ARG
3726 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3727 passed_type, named_arg);
3728 #else
3729 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3730 passed_type, named_arg);
3731 #endif
3732
3733 if (entry_parm == 0)
3734 promoted_mode = passed_mode;
3735
3736 #ifdef SETUP_INCOMING_VARARGS
3737 /* If this is the last named parameter, do any required setup for
3738 varargs or stdargs. We need to know about the case of this being an
3739 addressable type, in which case we skip the registers it
3740 would have arrived in.
3741
3742 For stdargs, LAST_NAMED will be set for two parameters, the one that
3743 is actually the last named, and the dummy parameter. We only
3744 want to do this action once.
3745
3746 Also, indicate when RTL generation is to be suppressed. */
3747 if (last_named && !varargs_setup)
3748 {
3749 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3750 current_function_pretend_args_size,
3751 second_time);
3752 varargs_setup = 1;
3753 }
3754 #endif
3755
3756 /* Determine parm's home in the stack,
3757 in case it arrives in the stack or we should pretend it did.
3758
3759 Compute the stack position and rtx where the argument arrives
3760 and its size.
3761
3762 There is one complexity here: If this was a parameter that would
3763 have been passed in registers, but wasn't only because it is
3764 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3765 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3766 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3767 0 as it was the previous time. */
3768
3769 locate_and_pad_parm (promoted_mode, passed_type,
3770 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3771 1,
3772 #else
3773 #ifdef FUNCTION_INCOMING_ARG
3774 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3775 passed_type,
3776 (named_arg
3777 || varargs_setup)) != 0,
3778 #else
3779 FUNCTION_ARG (args_so_far, promoted_mode,
3780 passed_type,
3781 named_arg || varargs_setup) != 0,
3782 #endif
3783 #endif
3784 fndecl, &stack_args_size, &stack_offset, &arg_size);
3785
3786 if (! second_time)
3787 {
3788 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3789
3790 if (offset_rtx == const0_rtx)
3791 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3792 else
3793 stack_parm = gen_rtx_MEM (promoted_mode,
3794 gen_rtx_PLUS (Pmode,
3795 internal_arg_pointer,
3796 offset_rtx));
3797
3798 /* If this is a memory ref that contains aggregate components,
3799 mark it as such for cse and loop optimize. Likewise if it
3800 is readonly. */
3801 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3802 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3803 }
3804
3805 /* If this parameter was passed both in registers and in the stack,
3806 use the copy on the stack. */
3807 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3808 entry_parm = 0;
3809
3810 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3811 /* If this parm was passed part in regs and part in memory,
3812 pretend it arrived entirely in memory
3813 by pushing the register-part onto the stack.
3814
3815 In the special case of a DImode or DFmode that is split,
3816 we could put it together in a pseudoreg directly,
3817 but for now that's not worth bothering with. */
3818
3819 if (entry_parm)
3820 {
3821 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3822 passed_type, named_arg);
3823
3824 if (nregs > 0)
3825 {
3826 current_function_pretend_args_size
3827 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3828 / (PARM_BOUNDARY / BITS_PER_UNIT)
3829 * (PARM_BOUNDARY / BITS_PER_UNIT));
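              /* A worked example of the rounding above, assuming
                 UNITS_PER_WORD is 4 and PARM_BOUNDARY is 64 bits (8 bytes):
                 with nregs == 3 the register part is 12 bytes, and
                 (12 + 8 - 1) / 8 * 8 == 16, so the pretend arg space is
                 padded up to a multiple of the parm boundary.  */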
3830
3831 if (! second_time)
3832 {
3833 /* Handle calls that pass values in multiple non-contiguous
3834 locations. The Irix 6 ABI has examples of this. */
3835 if (GET_CODE (entry_parm) == PARALLEL)
3836 emit_group_store (validize_mem (stack_parm),
3837 entry_parm);
3838 else
3839 move_block_from_reg (REGNO (entry_parm),
3840 validize_mem (stack_parm), nregs,
3841 int_size_in_bytes (TREE_TYPE (parm)));
3842 }
3843 entry_parm = stack_parm;
3844 }
3845 }
3846 #endif
3847
3848 /* If we didn't decide this parm came in a register,
3849 by default it came on the stack. */
3850 if (entry_parm == 0)
3851 entry_parm = stack_parm;
3852
3853 /* Record permanently how this parm was passed. */
3854 if (! second_time)
3855 DECL_INCOMING_RTL (parm) = entry_parm;
3856
3857 /* If there is actually space on the stack for this parm,
3858 count it in stack_args_size; otherwise set stack_parm to 0
3859 to indicate there is no preallocated stack slot for the parm. */
3860
3861 if (entry_parm == stack_parm
3862 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3863 /* On some machines, even if a parm value arrives in a register
3864 there is still an (uninitialized) stack slot allocated for it.
3865
3866 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3867 whether this parameter already has a stack slot allocated,
3868 because an arg block exists only if current_function_args_size
3869 is larger than some threshold, and we haven't calculated that
3870 yet. So, for now, we just assume that stack slots never exist
3871 in this case. */
3872 || REG_PARM_STACK_SPACE (fndecl) > 0
3873 #endif
3874 )
3875 {
3876 stack_args_size.constant += arg_size.constant;
3877 if (arg_size.var)
3878 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3879 }
3880 else
3881 /* No stack slot was pushed for this parm. */
3882 stack_parm = 0;
3883
3884 /* Update info on where next arg arrives in registers. */
3885
3886 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3887 passed_type, named_arg);
3888
3889 /* If this is our second time through, we are done with this parm. */
3890 if (second_time)
3891 continue;
3892
3893 /* If we can't trust the parm stack slot to be aligned enough
3894 for its ultimate type, don't use that slot after entry.
3895 We'll make another stack slot, if we need one. */
3896 {
3897 int thisparm_boundary
3898 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3899
3900 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3901 stack_parm = 0;
3902 }
3903
3904 /* If parm was passed in memory, and we need to convert it on entry,
3905 don't store it back in that same slot. */
3906 if (entry_parm != 0
3907 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3908 stack_parm = 0;
3909
3910 #if 0
3911 /* Now adjust STACK_PARM to the mode and precise location
3912 where this parameter should live during execution,
3913 if we discover that it must live in the stack during execution.
3914 To make debuggers happier on big-endian machines, we store
3915 the value in the last bytes of the space available. */
3916
3917 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3918 && stack_parm != 0)
3919 {
3920 rtx offset_rtx;
3921
3922 if (BYTES_BIG_ENDIAN
3923 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3924 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3925 - GET_MODE_SIZE (nominal_mode));
3926
3927 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3928 if (offset_rtx == const0_rtx)
3929 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
3930 else
3931 stack_parm = gen_rtx_MEM (nominal_mode,
3932 gen_rtx_PLUS (Pmode,
3933 internal_arg_pointer,
3934 offset_rtx));
3935
3936 /* If this is a memory ref that contains aggregate components,
3937 mark it as such for cse and loop optimize. */
3938 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3939 }
3940 #endif /* 0 */
3941
3942 #ifdef STACK_REGS
3943 /* We need this "use" info, because the gcc-register->stack-register
3944 converter in reg-stack.c needs to know which registers are active
3945          at the start of the function.  The actual parameter-loading
3946          instructions are not always still available at that point, since
3947          they might have been optimized away.  */
3948
3949 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3950 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
3951 #endif
3952
3953 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3954 in the mode in which it arrives.
3955 STACK_PARM is an RTX for a stack slot where the parameter can live
3956 during the function (in case we want to put it there).
3957 STACK_PARM is 0 if no stack slot was pushed for it.
3958
3959 Now output code if necessary to convert ENTRY_PARM to
3960 the type in which this function declares it,
3961 and store that result in an appropriate place,
3962 which may be a pseudo reg, may be STACK_PARM,
3963 or may be a local stack slot if STACK_PARM is 0.
3964
3965 Set DECL_RTL to that place. */
3966
3967 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3968 {
3969 /* If a BLKmode arrives in registers, copy it to a stack slot.
3970 Handle calls that pass values in multiple non-contiguous
3971 locations. The Irix 6 ABI has examples of this. */
3972 if (GET_CODE (entry_parm) == REG
3973 || GET_CODE (entry_parm) == PARALLEL)
3974 {
3975 int size_stored
3976 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3977 UNITS_PER_WORD);
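              /* E.g., assuming UNITS_PER_WORD is 4: a 10-byte BLKmode parm
                 gives CEIL_ROUND (10, 4) == ((10 + 3) & ~3) == 12, an
                 integral number of words as the comment below requires.  */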
3978
3979 /* Note that we will be storing an integral number of words.
3980 So we have to be careful to ensure that we allocate an
3981 integral number of words. We do this below in the
3982 assign_stack_local if space was not allocated in the argument
3983 list. If it was, this will not work if PARM_BOUNDARY is not
3984 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3985 if it becomes a problem. */
3986
3987 if (stack_parm == 0)
3988 {
3989 stack_parm
3990 = assign_stack_local (GET_MODE (entry_parm),
3991 size_stored, 0);
3992
3993 /* If this is a memory ref that contains aggregate
3994 components, mark it as such for cse and loop optimize. */
3995 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3996 }
3997
3998 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3999 abort ();
4000
4001 if (TREE_READONLY (parm))
4002 RTX_UNCHANGING_P (stack_parm) = 1;
4003
4004 /* Handle calls that pass values in multiple non-contiguous
4005 locations. The Irix 6 ABI has examples of this. */
4006 if (GET_CODE (entry_parm) == PARALLEL)
4007 emit_group_store (validize_mem (stack_parm), entry_parm);
4008 else
4009 move_block_from_reg (REGNO (entry_parm),
4010 validize_mem (stack_parm),
4011 size_stored / UNITS_PER_WORD,
4012 int_size_in_bytes (TREE_TYPE (parm)));
4013 }
4014 DECL_RTL (parm) = stack_parm;
4015 }
4016 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4017 && ! DECL_INLINE (fndecl))
4018 /* layout_decl may set this. */
4019 || TREE_ADDRESSABLE (parm)
4020 || TREE_SIDE_EFFECTS (parm)
4021 /* If -ffloat-store specified, don't put explicit
4022 float variables into registers. */
4023 || (flag_float_store
4024 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4025 /* Always assign pseudo to structure return or item passed
4026 by invisible reference. */
4027 || passed_pointer || parm == function_result_decl)
4028 {
4029 /* Store the parm in a pseudoregister during the function, but we
4030 may need to do it in a wider mode. */
4031
4032 register rtx parmreg;
4033 int regno, regnoi, regnor;
4034
4035 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4036
4037 promoted_nominal_mode
4038 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4039
4040 parmreg = gen_reg_rtx (promoted_nominal_mode);
4041 mark_user_reg (parmreg);
4042
4043 /* If this was an item that we received a pointer to, set DECL_RTL
4044 appropriately. */
4045 if (passed_pointer)
4046 {
4047 DECL_RTL (parm)
4048 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4049 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4050 }
4051 else
4052 DECL_RTL (parm) = parmreg;
4053
4054 /* Copy the value into the register. */
4055 if (nominal_mode != passed_mode
4056 || promoted_nominal_mode != promoted_mode)
4057 {
4058 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4059 mode, by the caller. We now have to convert it to
4060 NOMINAL_MODE, if different. However, PARMREG may be in
4061 a different mode than NOMINAL_MODE if it is being stored
4062 promoted.
4063
4064 If ENTRY_PARM is a hard register, it might be in a register
4065 not valid for operating in its mode (e.g., an odd-numbered
4066 register for a DFmode). In that case, moves are the only
4067 thing valid, so we can't do a convert from there. This
4068              occurs when the calling sequence allows such misaligned
4069 usages.
4070
4071 In addition, the conversion may involve a call, which could
4072 clobber parameters which haven't been copied to pseudo
4073 registers yet. Therefore, we must first copy the parm to
4074 a pseudo reg here, and save the conversion until after all
4075 parameters have been moved. */
4076
4077 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4078
4079 emit_move_insn (tempreg, validize_mem (entry_parm));
4080
4081 push_to_sequence (conversion_insns);
4082 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4083
4084 expand_assignment (parm,
4085 make_tree (nominal_type, tempreg), 0, 0);
4086 conversion_insns = get_insns ();
4087 did_conversion = 1;
4088 end_sequence ();
4089 }
4090 else
4091 emit_move_insn (parmreg, validize_mem (entry_parm));
4092
4093 /* If we were passed a pointer but the actual value
4094 can safely live in a register, put it in one. */
4095 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4096 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4097 && ! DECL_INLINE (fndecl))
4098 /* layout_decl may set this. */
4099 || TREE_ADDRESSABLE (parm)
4100 || TREE_SIDE_EFFECTS (parm)
4101 /* If -ffloat-store specified, don't put explicit
4102 float variables into registers. */
4103 || (flag_float_store
4104 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4105 {
4106 /* We can't use nominal_mode, because it will have been set to
4107 Pmode above. We must use the actual mode of the parm. */
4108 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4109 mark_user_reg (parmreg);
4110 emit_move_insn (parmreg, DECL_RTL (parm));
4111 DECL_RTL (parm) = parmreg;
4112 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4113 now the parm. */
4114 stack_parm = 0;
4115 }
4116 #ifdef FUNCTION_ARG_CALLEE_COPIES
4117 /* If we are passed an arg by reference and it is our responsibility
4118 to make a copy, do it now.
4119          PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4120 original argument, so we must recreate them in the call to
4121 FUNCTION_ARG_CALLEE_COPIES. */
4122 /* ??? Later add code to handle the case that if the argument isn't
4123 modified, don't do the copy. */
4124
4125 else if (passed_pointer
4126 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4127 TYPE_MODE (DECL_ARG_TYPE (parm)),
4128 DECL_ARG_TYPE (parm),
4129 named_arg)
4130 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4131 {
4132 rtx copy;
4133 tree type = DECL_ARG_TYPE (parm);
4134
4135 /* This sequence may involve a library call perhaps clobbering
4136 registers that haven't been copied to pseudos yet. */
4137
4138 push_to_sequence (conversion_insns);
4139
4140 if (TYPE_SIZE (type) == 0
4141 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4142 /* This is a variable sized object. */
4143 copy = gen_rtx_MEM (BLKmode,
4144 allocate_dynamic_stack_space
4145 (expr_size (parm), NULL_RTX,
4146 TYPE_ALIGN (type)));
4147 else
4148 copy = assign_stack_temp (TYPE_MODE (type),
4149 int_size_in_bytes (type), 1);
4150 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4151 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4152
4153 store_expr (parm, copy, 0);
4154 emit_move_insn (parmreg, XEXP (copy, 0));
4155 if (flag_check_memory_usage)
4156 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4157 XEXP (copy, 0), ptr_mode,
4158 GEN_INT (int_size_in_bytes (type)),
4159 TYPE_MODE (sizetype),
4160 GEN_INT (MEMORY_USE_RW),
4161 TYPE_MODE (integer_type_node));
4162 conversion_insns = get_insns ();
4163 did_conversion = 1;
4164 end_sequence ();
4165 }
4166 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4167
4168 /* In any case, record the parm's desired stack location
4169 in case we later discover it must live in the stack.
4170
4171 If it is a COMPLEX value, store the stack location for both
4172 halves. */
4173
4174 if (GET_CODE (parmreg) == CONCAT)
4175 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4176 else
4177 regno = REGNO (parmreg);
4178
4179 if (regno >= max_parm_reg)
4180 {
4181 rtx *new;
4182 int old_max_parm_reg = max_parm_reg;
4183
4184 /* It's slow to expand this one register at a time,
4185 but it's also rare and we need max_parm_reg to be
4186 precisely correct. */
4187 max_parm_reg = regno + 1;
4188 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4189 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4190 old_max_parm_reg * sizeof (rtx));
4191 bzero ((char *) (new + old_max_parm_reg),
4192 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4193 parm_reg_stack_loc = new;
4194 }
4195
4196 if (GET_CODE (parmreg) == CONCAT)
4197 {
4198 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4199
4200 regnor = REGNO (gen_realpart (submode, parmreg));
4201 regnoi = REGNO (gen_imagpart (submode, parmreg));
4202
4203 if (stack_parm != 0)
4204 {
4205 parm_reg_stack_loc[regnor]
4206 = gen_realpart (submode, stack_parm);
4207 parm_reg_stack_loc[regnoi]
4208 = gen_imagpart (submode, stack_parm);
4209 }
4210 else
4211 {
4212 parm_reg_stack_loc[regnor] = 0;
4213 parm_reg_stack_loc[regnoi] = 0;
4214 }
4215 }
4216 else
4217 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4218
4219 /* Mark the register as eliminable if we did no conversion
4220 and it was copied from memory at a fixed offset,
4221 and the arg pointer was not copied to a pseudo-reg.
4222 If the arg pointer is a pseudo reg or the offset formed
4223 an invalid address, such memory-equivalences
4224 as we make here would screw up life analysis for it. */
4225 if (nominal_mode == passed_mode
4226 && ! did_conversion
4227 && stack_parm != 0
4228 && GET_CODE (stack_parm) == MEM
4229 && stack_offset.var == 0
4230 && reg_mentioned_p (virtual_incoming_args_rtx,
4231 XEXP (stack_parm, 0)))
4232 {
4233 rtx linsn = get_last_insn ();
4234 rtx sinsn, set;
4235
4236 /* Mark complex types separately. */
4237 if (GET_CODE (parmreg) == CONCAT)
4238 /* Scan backwards for the set of the real and
4239 imaginary parts. */
4240 for (sinsn = linsn; sinsn != 0;
4241 sinsn = prev_nonnote_insn (sinsn))
4242 {
4243 set = single_set (sinsn);
4244 if (set != 0
4245 && SET_DEST (set) == regno_reg_rtx [regnoi])
4246 REG_NOTES (sinsn)
4247 = gen_rtx_EXPR_LIST (REG_EQUIV,
4248 parm_reg_stack_loc[regnoi],
4249 REG_NOTES (sinsn));
4250 else if (set != 0
4251 && SET_DEST (set) == regno_reg_rtx [regnor])
4252 REG_NOTES (sinsn)
4253 = gen_rtx_EXPR_LIST (REG_EQUIV,
4254 parm_reg_stack_loc[regnor],
4255 REG_NOTES (sinsn));
4256 }
4257 else if ((set = single_set (linsn)) != 0
4258 && SET_DEST (set) == parmreg)
4259 REG_NOTES (linsn)
4260 = gen_rtx_EXPR_LIST (REG_EQUIV,
4261 stack_parm, REG_NOTES (linsn));
4262 }
4263
4264 /* For pointer data type, suggest pointer register. */
4265 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
4266 mark_reg_pointer (parmreg,
4267 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4268 / BITS_PER_UNIT));
4269 }
4270 else
4271 {
4272 /* Value must be stored in the stack slot STACK_PARM
4273 during function execution. */
4274
4275 if (promoted_mode != nominal_mode)
4276 {
4277 /* Conversion is required. */
4278 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4279
4280 emit_move_insn (tempreg, validize_mem (entry_parm));
4281
4282 push_to_sequence (conversion_insns);
4283 entry_parm = convert_to_mode (nominal_mode, tempreg,
4284 TREE_UNSIGNED (TREE_TYPE (parm)));
4285 if (stack_parm)
4286 {
4287 /* ??? This may need a big-endian conversion on sparc64. */
4288 stack_parm = change_address (stack_parm, nominal_mode,
4289 NULL_RTX);
4290 }
4291 conversion_insns = get_insns ();
4292 did_conversion = 1;
4293 end_sequence ();
4294 }
4295
4296 if (entry_parm != stack_parm)
4297 {
4298 if (stack_parm == 0)
4299 {
4300 stack_parm
4301 = assign_stack_local (GET_MODE (entry_parm),
4302 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4303 /* If this is a memory ref that contains aggregate components,
4304 mark it as such for cse and loop optimize. */
4305 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4306 }
4307
4308 if (promoted_mode != nominal_mode)
4309 {
4310 push_to_sequence (conversion_insns);
4311 emit_move_insn (validize_mem (stack_parm),
4312 validize_mem (entry_parm));
4313 conversion_insns = get_insns ();
4314 end_sequence ();
4315 }
4316 else
4317 emit_move_insn (validize_mem (stack_parm),
4318 validize_mem (entry_parm));
4319 }
4320 if (flag_check_memory_usage
4321 && entry_parm != stack_parm
4322 && promoted_mode != nominal_mode)
4323 {
4324 push_to_sequence (conversion_insns);
4325 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4326 XEXP (stack_parm, 0), ptr_mode,
4327 GEN_INT (GET_MODE_SIZE (GET_MODE
4328 (entry_parm))),
4329 TYPE_MODE (sizetype),
4330 GEN_INT (MEMORY_USE_RW),
4331 TYPE_MODE (integer_type_node));
4332
4333 conversion_insns = get_insns ();
4334 end_sequence ();
4335 }
4336 DECL_RTL (parm) = stack_parm;
4337 }
4338
4339 /* If this "parameter" was the place where we are receiving the
4340 function's incoming structure pointer, set up the result. */
4341 if (parm == function_result_decl)
4342 {
4343 tree result = DECL_RESULT (fndecl);
4344 tree restype = TREE_TYPE (result);
4345
4346 DECL_RTL (result)
4347 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4348
4349 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4350 }
4351
4352 if (TREE_THIS_VOLATILE (parm))
4353 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4354 if (TREE_READONLY (parm))
4355 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4356 }
4357
4358 /* Output all parameter conversion instructions (possibly including calls)
4359 now that all parameters have been copied out of hard registers. */
4360 emit_insns (conversion_insns);
4361
4362 last_parm_insn = get_last_insn ();
4363
4364 current_function_args_size = stack_args_size.constant;
4365
4366 /* Adjust function incoming argument size for alignment and
4367 minimum length. */
4368
4369 #ifdef REG_PARM_STACK_SPACE
4370 #ifndef MAYBE_REG_PARM_STACK_SPACE
4371 current_function_args_size = MAX (current_function_args_size,
4372 REG_PARM_STACK_SPACE (fndecl));
4373 #endif
4374 #endif
4375
4376 #ifdef STACK_BOUNDARY
4377 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4378
4379 current_function_args_size
4380 = ((current_function_args_size + STACK_BYTES - 1)
4381 / STACK_BYTES) * STACK_BYTES;
4382 #endif
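  /* A worked example of the rounding above, assuming STACK_BOUNDARY is
     64 bits: STACK_BYTES is 8, so an args size of 9 bytes becomes
     (9 + 8 - 1) / 8 * 8 == 16 bytes.  */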
4383
4384 #ifdef ARGS_GROW_DOWNWARD
4385 current_function_arg_offset_rtx
4386 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4387 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4388 size_int (-stack_args_size.constant)),
4389 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4390 #else
4391 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4392 #endif
4393
4394 /* See how many bytes, if any, of its args a function should try to pop
4395 on return. */
4396
4397 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4398 current_function_args_size);
4399
4400   /* For a stdarg.h function, save info about
4401 regs and stack space used by the named args. */
4402
4403 if (!hide_last_arg)
4404 current_function_args_info = args_so_far;
4405
4406 /* Set the rtx used for the function return value. Put this in its
4407 own variable so any optimizers that need this information don't have
4408 to include tree.h. Do this here so it gets done when an inlined
4409 function gets output. */
4410
4411 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4412 }
4413 \f
4414 /* Indicate whether REGNO is an incoming argument to the current function
4415 that was promoted to a wider mode. If so, return the RTX for the
4416 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4417 that REGNO is promoted from and whether the promotion was signed or
4418 unsigned. */
4419
4420 #ifdef PROMOTE_FUNCTION_ARGS
4421
4422 rtx
4423 promoted_input_arg (regno, pmode, punsignedp)
4424 int regno;
4425 enum machine_mode *pmode;
4426 int *punsignedp;
4427 {
4428 tree arg;
4429
4430 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4431 arg = TREE_CHAIN (arg))
4432 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4433 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4434 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4435 {
4436 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4437 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4438
4439 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4440 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4441 && mode != DECL_MODE (arg))
4442 {
4443 *pmode = DECL_MODE (arg);
4444 *punsignedp = unsignedp;
4445 return DECL_INCOMING_RTL (arg);
4446 }
4447 }
4448
4449 return 0;
4450 }
4451
4452 #endif
4453 \f
4454 /* Compute the size and offset from the start of the stacked arguments for a
4455 parm passed in mode PASSED_MODE and with type TYPE.
4456
4457 INITIAL_OFFSET_PTR points to the current offset into the stacked
4458 arguments.
4459
4460 The starting offset and size for this parm are returned in *OFFSET_PTR
4461 and *ARG_SIZE_PTR, respectively.
4462
4463 IN_REGS is non-zero if the argument will be passed in registers. It will
4464 never be set if REG_PARM_STACK_SPACE is not defined.
4465
4466 FNDECL is the function in which the argument was defined.
4467
4468 There are two types of rounding that are done. The first, controlled by
4469 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4470 list to be aligned to the specific boundary (in bits). This rounding
4471 affects the initial and starting offsets, but not the argument size.
4472
4473 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4474 optionally rounds the size of the parm to PARM_BOUNDARY. The
4475 initial offset is not affected by this rounding, while the size always
4476 is and the starting offset may be. */
4477
4478 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4479    initial_offset_ptr is positive because locate_and_pad_parm's
4480    callers pass in the total size of args so far as
4481    initial_offset_ptr.  arg_size_ptr is always positive.  */
4482
4483 void
4484 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4485 initial_offset_ptr, offset_ptr, arg_size_ptr)
4486 enum machine_mode passed_mode;
4487 tree type;
4488 int in_regs;
4489 tree fndecl;
4490 struct args_size *initial_offset_ptr;
4491 struct args_size *offset_ptr;
4492 struct args_size *arg_size_ptr;
4493 {
4494 tree sizetree
4495 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4496 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4497 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4498
4499 #ifdef REG_PARM_STACK_SPACE
4500 /* If we have found a stack parm before we reach the end of the
4501 area reserved for registers, skip that area. */
4502 if (! in_regs)
4503 {
4504 int reg_parm_stack_space = 0;
4505
4506 #ifdef MAYBE_REG_PARM_STACK_SPACE
4507 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4508 #else
4509 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4510 #endif
4511 if (reg_parm_stack_space > 0)
4512 {
4513 if (initial_offset_ptr->var)
4514 {
4515 initial_offset_ptr->var
4516 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4517 size_int (reg_parm_stack_space));
4518 initial_offset_ptr->constant = 0;
4519 }
4520 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4521 initial_offset_ptr->constant = reg_parm_stack_space;
4522 }
4523 }
4524 #endif /* REG_PARM_STACK_SPACE */
4525
4526 arg_size_ptr->var = 0;
4527 arg_size_ptr->constant = 0;
4528
4529 #ifdef ARGS_GROW_DOWNWARD
4530 if (initial_offset_ptr->var)
4531 {
4532 offset_ptr->constant = 0;
4533 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4534 initial_offset_ptr->var);
4535 }
4536 else
4537 {
4538 offset_ptr->constant = - initial_offset_ptr->constant;
4539 offset_ptr->var = 0;
4540 }
4541 if (where_pad != none
4542 && (TREE_CODE (sizetree) != INTEGER_CST
4543 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4544 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4545 SUB_PARM_SIZE (*offset_ptr, sizetree);
4546 if (where_pad != downward)
4547 pad_to_arg_alignment (offset_ptr, boundary);
4548 if (initial_offset_ptr->var)
4549 {
4550 arg_size_ptr->var = size_binop (MINUS_EXPR,
4551 size_binop (MINUS_EXPR,
4552 integer_zero_node,
4553 initial_offset_ptr->var),
4554 offset_ptr->var);
4555 }
4556 else
4557 {
4558 arg_size_ptr->constant = (- initial_offset_ptr->constant
4559 - offset_ptr->constant);
4560 }
4561 #else /* !ARGS_GROW_DOWNWARD */
4562 pad_to_arg_alignment (initial_offset_ptr, boundary);
4563 *offset_ptr = *initial_offset_ptr;
4564
4565 #ifdef PUSH_ROUNDING
4566 if (passed_mode != BLKmode)
4567 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4568 #endif
4569
4570   /* Pad_below needs the pre-rounded size to know how much to pad below,
4571 so this must be done before rounding up. */
4572 if (where_pad == downward
4573 /* However, BLKmode args passed in regs have their padding done elsewhere.
4574 The stack slot must be able to hold the entire register. */
4575 && !(in_regs && passed_mode == BLKmode))
4576 pad_below (offset_ptr, passed_mode, sizetree);
4577
4578 if (where_pad != none
4579 && (TREE_CODE (sizetree) != INTEGER_CST
4580 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4581 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4582
4583 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4584 #endif /* ARGS_GROW_DOWNWARD */
4585 }
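/* A worked example, assuming args grow upward, PARM_BOUNDARY and the
   argument's FUNCTION_ARG_BOUNDARY are both 32 bits, no
   REG_PARM_STACK_SPACE, and ignoring PUSH_ROUNDING: for a 4-byte int
   arriving when 6 bytes of args have been laid out, the initial offset is
   first padded up to 8, *offset_ptr becomes 8, and *arg_size_ptr becomes 4;
   the caller then advances its running total to 12 for the next parm.  */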
4586
4587 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4588 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4589
4590 static void
4591 pad_to_arg_alignment (offset_ptr, boundary)
4592 struct args_size *offset_ptr;
4593 int boundary;
4594 {
4595 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4596
4597 if (boundary > BITS_PER_UNIT)
4598 {
4599 if (offset_ptr->var)
4600 {
4601 offset_ptr->var =
4602 #ifdef ARGS_GROW_DOWNWARD
4603 round_down
4604 #else
4605 round_up
4606 #endif
4607 (ARGS_SIZE_TREE (*offset_ptr),
4608 boundary / BITS_PER_UNIT);
4609 offset_ptr->constant = 0; /*?*/
4610 }
4611 else
4612 offset_ptr->constant =
4613 #ifdef ARGS_GROW_DOWNWARD
4614 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4615 #else
4616 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4617 #endif
4618 }
4619 }
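/* E.g., with boundary == 32 bits (4 bytes) and a constant offset of 13:
   CEIL_ROUND (13, 4) == 16 when args grow upward, while the
   ARGS_GROW_DOWNWARD variant uses FLOOR_ROUND (13, 4) == 12, since there
   the offsets run negative.  */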
4620
4621 #ifndef ARGS_GROW_DOWNWARD
4622 static void
4623 pad_below (offset_ptr, passed_mode, sizetree)
4624 struct args_size *offset_ptr;
4625 enum machine_mode passed_mode;
4626 tree sizetree;
4627 {
4628 if (passed_mode != BLKmode)
4629 {
4630 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4631 offset_ptr->constant
4632 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4633 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4634 - GET_MODE_SIZE (passed_mode));
4635 }
4636 else
4637 {
4638 if (TREE_CODE (sizetree) != INTEGER_CST
4639 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4640 {
4641 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4642 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4643 /* Add it in. */
4644 ADD_PARM_SIZE (*offset_ptr, s2);
4645 SUB_PARM_SIZE (*offset_ptr, sizetree);
4646 }
4647 }
4648 }
4649 #endif
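/* A worked example of pad_below, assuming PARM_BOUNDARY is 32 bits: a
   QImode parm (8 bits, 1 byte) gets
   ((8 + 31) / 32 * 32 / 8) - 1 == 3 added to its offset, placing the byte
   at the top of its 4-byte slot (the padding sits below the value).  */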
4650
4651 static tree
4652 round_down (value, divisor)
4653 tree value;
4654 int divisor;
4655 {
4656 return size_binop (MULT_EXPR,
4657 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4658 size_int (divisor));
4659 }
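/* E.g., round_down on value == 11 with divisor == 4 builds
   (11 floor/ 4) * 4 == 2 * 4 == 8 as a size tree.  */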
4660 \f
4661 /* Walk the tree of blocks describing the binding levels within a function
4662 and warn about uninitialized variables.
4663 This is done after calling flow_analysis and before global_alloc
4664    binds the pseudo-regs to hard regs.  */
4665
4666 void
4667 uninitialized_vars_warning (block)
4668 tree block;
4669 {
4670 register tree decl, sub;
4671 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4672 {
4673 if (TREE_CODE (decl) == VAR_DECL
4674         /* These warnings are unreliable for aggregates
4675 because assigning the fields one by one can fail to convince
4676 flow.c that the entire aggregate was initialized.
4677 Unions are troublesome because members may be shorter. */
4678 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4679 && DECL_RTL (decl) != 0
4680 && GET_CODE (DECL_RTL (decl)) == REG
4681 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4682 warning_with_decl (decl,
4683 "`%s' might be used uninitialized in this function");
4684 if (TREE_CODE (decl) == VAR_DECL
4685 && DECL_RTL (decl) != 0
4686 && GET_CODE (DECL_RTL (decl)) == REG
4687 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4688 warning_with_decl (decl,
4689 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4690 }
4691 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4692 uninitialized_vars_warning (sub);
4693 }
4694
4695 /* Do the appropriate part of uninitialized_vars_warning
4696 but for arguments instead of local variables. */
4697
4698 void
4699 setjmp_args_warning ()
4700 {
4701 register tree decl;
4702 for (decl = DECL_ARGUMENTS (current_function_decl);
4703 decl; decl = TREE_CHAIN (decl))
4704 if (DECL_RTL (decl) != 0
4705 && GET_CODE (DECL_RTL (decl)) == REG
4706 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4707 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4708 }
4709
4710 /* If this function calls setjmp, put all vars into the stack
4711 unless they were declared `register'. */
4712
4713 void
4714 setjmp_protect (block)
4715 tree block;
4716 {
4717 register tree decl, sub;
4718 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4719 if ((TREE_CODE (decl) == VAR_DECL
4720 || TREE_CODE (decl) == PARM_DECL)
4721 && DECL_RTL (decl) != 0
4722 && (GET_CODE (DECL_RTL (decl)) == REG
4723 || (GET_CODE (DECL_RTL (decl)) == MEM
4724 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4725         /* If this variable came from an inline function, it must be
4726            that its lifetime doesn't overlap the setjmp.  If there was a
4727            setjmp in the function, it would already be in memory.  We
4728            must exclude such variables because their DECL_RTL might be
4729            set to strange things such as virtual_stack_vars_rtx.  */
4730 && ! DECL_FROM_INLINE (decl)
4731 && (
4732 #ifdef NON_SAVING_SETJMP
4733 /* If longjmp doesn't restore the registers,
4734 don't put anything in them. */
4735 NON_SAVING_SETJMP
4736 ||
4737 #endif
4738 ! DECL_REGISTER (decl)))
4739 put_var_into_stack (decl);
4740 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4741 setjmp_protect (sub);
4742 }
4743 \f
4744 /* Like the previous function, but for args instead of local variables. */
4745
4746 void
4747 setjmp_protect_args ()
4748 {
4749 register tree decl;
4750 for (decl = DECL_ARGUMENTS (current_function_decl);
4751 decl; decl = TREE_CHAIN (decl))
4752 if ((TREE_CODE (decl) == VAR_DECL
4753 || TREE_CODE (decl) == PARM_DECL)
4754 && DECL_RTL (decl) != 0
4755 && (GET_CODE (DECL_RTL (decl)) == REG
4756 || (GET_CODE (DECL_RTL (decl)) == MEM
4757 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4758 && (
4759 /* If longjmp doesn't restore the registers,
4760 don't put anything in them. */
4761 #ifdef NON_SAVING_SETJMP
4762 NON_SAVING_SETJMP
4763 ||
4764 #endif
4765 ! DECL_REGISTER (decl)))
4766 put_var_into_stack (decl);
4767 }
4768 \f
4769 /* Return the context-pointer register corresponding to DECL,
4770 or 0 if it does not need one. */
4771
4772 rtx
4773 lookup_static_chain (decl)
4774 tree decl;
4775 {
4776 tree context = decl_function_context (decl);
4777 tree link;
4778
4779 if (context == 0
4780 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4781 return 0;
4782
4783 /* We treat inline_function_decl as an alias for the current function
4784 because that is the inline function whose vars, types, etc.
4785 are being merged into the current function.
4786 See expand_inline_function. */
4787 if (context == current_function_decl || context == inline_function_decl)
4788 return virtual_stack_vars_rtx;
4789
4790 for (link = context_display; link; link = TREE_CHAIN (link))
4791 if (TREE_PURPOSE (link) == context)
4792 return RTL_EXPR_RTL (TREE_VALUE (link));
4793
4794 abort ();
4795 }
4796 \f
4797 /* Convert a stack slot address ADDR for variable VAR
4798 (from a containing function)
4799 into an address valid in this function (using a static chain). */
4800
4801 rtx
4802 fix_lexical_addr (addr, var)
4803 rtx addr;
4804 tree var;
4805 {
4806 rtx basereg;
4807 int displacement;
4808 tree context = decl_function_context (var);
4809 struct function *fp;
4810 rtx base = 0;
4811
4812 /* If this is the present function, we need not do anything. */
4813 if (context == current_function_decl || context == inline_function_decl)
4814 return addr;
4815
4816 for (fp = outer_function_chain; fp; fp = fp->next)
4817 if (fp->decl == context)
4818 break;
4819
4820 if (fp == 0)
4821 abort ();
4822
4823 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4824 addr = XEXP (XEXP (addr, 0), 0);
4825
4826 /* Decode given address as base reg plus displacement. */
4827 if (GET_CODE (addr) == REG)
4828 basereg = addr, displacement = 0;
4829 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4830 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4831 else
4832 abort ();
4833
4834 /* We accept vars reached via the containing function's
4835 incoming arg pointer and via its stack variables pointer. */
4836 if (basereg == fp->internal_arg_pointer)
4837 {
4838 /* If reached via arg pointer, get the arg pointer value
4839 out of that function's stack frame.
4840
4841 There are two cases: If a separate ap is needed, allocate a
4842 slot in the outer function for it and dereference it that way.
4843 This is correct even if the real ap is actually a pseudo.
4844 Otherwise, just adjust the offset from the frame pointer to
4845 compensate. */
4846
4847 #ifdef NEED_SEPARATE_AP
4848 rtx addr;
4849
4850 if (fp->arg_pointer_save_area == 0)
4851 fp->arg_pointer_save_area
4852 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4853
4854 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4855 addr = memory_address (Pmode, addr);
4856
4857 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4858 #else
4859 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4860 base = lookup_static_chain (var);
4861 #endif
4862 }
4863
4864 else if (basereg == virtual_stack_vars_rtx)
4865 {
4866 /* This is the same code as lookup_static_chain, duplicated here to
4867 avoid an extra call to decl_function_context. */
4868 tree link;
4869
4870 for (link = context_display; link; link = TREE_CHAIN (link))
4871 if (TREE_PURPOSE (link) == context)
4872 {
4873 base = RTL_EXPR_RTL (TREE_VALUE (link));
4874 break;
4875 }
4876 }
4877
4878 if (base == 0)
4879 abort ();
4880
4881 /* Use same offset, relative to appropriate static chain or argument
4882 pointer. */
4883 return plus_constant (base, displacement);
4884 }
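/* A worked example, assuming no NEED_SEPARATE_AP: for ADDR of the form
   (plus (reg virtual_stack_vars) (const_int 8)) belonging to a parent
   function, the loop over context_display finds that frame's static chain
   value as BASE, and the result is simply (plus BASE 8) -- the same
   displacement, now relative to the parent's frame base.  */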
4885 \f
4886 /* Return the address of the trampoline for entering nested fn FUNCTION.
4887 If necessary, allocate a trampoline (in the stack frame)
4888 and emit rtl to initialize its contents (at entry to this function). */
4889
4890 rtx
4891 trampoline_address (function)
4892 tree function;
4893 {
4894 tree link;
4895 tree rtlexp;
4896 rtx tramp;
4897 struct function *fp;
4898 tree fn_context;
4899
4900 /* Find an existing trampoline and return it. */
4901 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4902 if (TREE_PURPOSE (link) == function)
4903 return
4904 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4905
4906 for (fp = outer_function_chain; fp; fp = fp->next)
4907 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4908 if (TREE_PURPOSE (link) == function)
4909 {
4910 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4911 function);
4912 return round_trampoline_addr (tramp);
4913 }
4914
4915 /* None exists; we must make one. */
4916
4917 /* Find the `struct function' for the function containing FUNCTION. */
4918 fp = 0;
4919 fn_context = decl_function_context (function);
4920 if (fn_context != current_function_decl
4921 && fn_context != inline_function_decl)
4922 for (fp = outer_function_chain; fp; fp = fp->next)
4923 if (fp->decl == fn_context)
4924 break;
4925
4926 /* Allocate run-time space for this trampoline
4927 (usually in the defining function's stack frame). */
4928 #ifdef ALLOCATE_TRAMPOLINE
4929 tramp = ALLOCATE_TRAMPOLINE (fp);
4930 #else
4931 /* If rounding needed, allocate extra space
4932 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4933 #ifdef TRAMPOLINE_ALIGNMENT
4934 #define TRAMPOLINE_REAL_SIZE \
4935 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4936 #else
4937 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4938 #endif
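  /* E.g., if TRAMPOLINE_SIZE is 10 and TRAMPOLINE_ALIGNMENT is 64 bits
     (8 bytes), TRAMPOLINE_REAL_SIZE is 10 + 8 - 1 == 17, so at most 7
     bytes are lost to the round_trampoline_addr adjustment and at least
     10 usable bytes always remain.  */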
4939 if (fp != 0)
4940 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4941 else
4942 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4943 #endif
4944
4945 /* Record the trampoline for reuse and note it for later initialization
4946 by expand_function_end. */
4947 if (fp != 0)
4948 {
4949 push_obstacks (fp->function_maybepermanent_obstack,
4950 fp->function_maybepermanent_obstack);
4951 rtlexp = make_node (RTL_EXPR);
4952 RTL_EXPR_RTL (rtlexp) = tramp;
4953 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4954 pop_obstacks ();
4955 }
4956 else
4957 {
4958 /* Make the RTL_EXPR node temporary, not momentary, so that the
4959 trampoline_list doesn't become garbage. */
4960 int momentary = suspend_momentary ();
4961 rtlexp = make_node (RTL_EXPR);
4962 resume_momentary (momentary);
4963
4964 RTL_EXPR_RTL (rtlexp) = tramp;
4965 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4966 }
4967
4968 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4969 return round_trampoline_addr (tramp);
4970 }
4971
4972 /* Given a trampoline address,
4973 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4974
4975 static rtx
4976 round_trampoline_addr (tramp)
4977 rtx tramp;
4978 {
4979 #ifdef TRAMPOLINE_ALIGNMENT
4980 /* Round address up to desired boundary. */
4981 rtx temp = gen_reg_rtx (Pmode);
4982 temp = expand_binop (Pmode, add_optab, tramp,
4983 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4984 temp, 0, OPTAB_LIB_WIDEN);
4985 tramp = expand_binop (Pmode, and_optab, temp,
4986 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4987 temp, 0, OPTAB_LIB_WIDEN);
4988 #endif
4989 return tramp;
4990 }
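/* E.g., with TRAMPOLINE_ALIGNMENT == 64 bits (8 bytes), an address of
   0x1003 becomes (0x1003 + 7) & -8 == 0x1008; addresses already on an
   8-byte boundary are unchanged.  */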
4991 \f
4992 /* The functions identify_blocks and reorder_blocks provide a way to
4993 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4994 duplicate portions of the RTL code. Call identify_blocks before
4995 changing the RTL, and call reorder_blocks after. */
4996
4997 /* Put all this function's BLOCK nodes, including those that are chained
4998    onto the first block, into a vector, and return it.
4999 Also store in each NOTE for the beginning or end of a block
5000 the index of that block in the vector.
5001 The arguments are BLOCK, the chain of top-level blocks of the function,
5002 and INSNS, the insn chain of the function. */
5003
5004 tree *
5005 identify_blocks (block, insns)
5006 tree block;
5007 rtx insns;
5008 {
5009 int n_blocks;
5010 tree *block_vector;
5011 int *block_stack;
5012 int depth = 0;
5013 int next_block_number = 1;
5014 int current_block_number = 1;
5015 rtx insn;
5016
5017 if (block == 0)
5018 return 0;
5019
5020 n_blocks = all_blocks (block, 0);
5021 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5022 block_stack = (int *) alloca (n_blocks * sizeof (int));
5023
5024 all_blocks (block, block_vector);
5025
5026 for (insn = insns; insn; insn = NEXT_INSN (insn))
5027 if (GET_CODE (insn) == NOTE)
5028 {
5029 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5030 {
5031 block_stack[depth++] = current_block_number;
5032 current_block_number = next_block_number;
5033 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5034 }
5035 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5036 {
5037 current_block_number = block_stack[--depth];
5038 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5039 }
5040 }
5041
5042 if (n_blocks != next_block_number)
5043 abort ();
5044
5045 return block_vector;
5046 }
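/* A small example: for a function whose outermost block has two nested
   subblocks B1 and B2, all_blocks fills block_vector with
   { top, B1, B2 }; assuming B1's insns precede B2's, their
   BLOCK_BEG/BLOCK_END notes get the indices 1 and 2, and
   next_block_number finishes equal to n_blocks == 3.  */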
5047
5048 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5049 and a revised instruction chain, rebuild the tree structure
5050 of BLOCK nodes to correspond to the new order of RTL.
5051    The new block tree is inserted below BLOCK.
5052 Returns the current top-level block. */
5053
5054 tree
5055 reorder_blocks (block_vector, block, insns)
5056 tree *block_vector;
5057 tree block;
5058 rtx insns;
5059 {
5060 tree current_block = block;
5061 rtx insn;
5062
5063 if (block_vector == 0)
5064 return block;
5065
5066   /* Prune the old tree away, so that it doesn't get in the way.  */
5067 BLOCK_SUBBLOCKS (current_block) = 0;
5068 BLOCK_CHAIN (current_block) = 0;
5069
5070 for (insn = insns; insn; insn = NEXT_INSN (insn))
5071 if (GET_CODE (insn) == NOTE)
5072 {
5073 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5074 {
5075 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5076 /* If we have seen this block before, copy it. */
5077 if (TREE_ASM_WRITTEN (block))
5078 block = copy_node (block);
5079 BLOCK_SUBBLOCKS (block) = 0;
5080 TREE_ASM_WRITTEN (block) = 1;
5081 BLOCK_SUPERCONTEXT (block) = current_block;
5082 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5083 BLOCK_SUBBLOCKS (current_block) = block;
5084 current_block = block;
5085 NOTE_SOURCE_FILE (insn) = 0;
5086 }
5087 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5088 {
5089 BLOCK_SUBBLOCKS (current_block)
5090 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5091 current_block = BLOCK_SUPERCONTEXT (current_block);
5092 NOTE_SOURCE_FILE (insn) = 0;
5093 }
5094 }
5095
5096 BLOCK_SUBBLOCKS (current_block)
5097 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5098 return current_block;
5099 }
5100
5101 /* Reverse the order of elements in the chain T of blocks,
5102 and return the new head of the chain (old last element). */
5103
5104 static tree
5105 blocks_nreverse (t)
5106 tree t;
5107 {
5108 register tree prev = 0, decl, next;
5109 for (decl = t; decl; decl = next)
5110 {
5111 next = BLOCK_CHAIN (decl);
5112 BLOCK_CHAIN (decl) = prev;
5113 prev = decl;
5114 }
5115 return prev;
5116 }
5117
5118 /* Count the blocks in the chain starting with BLOCK, together with all
5119    their subblocks, and if VECTOR is nonzero store them all into it.
5120    Also clear TREE_ASM_WRITTEN in every block. */
5121
5122 static int
5123 all_blocks (block, vector)
5124 tree block;
5125 tree *vector;
5126 {
5127 int n_blocks = 0;
5128
5129 while (block)
5130 {
5131 TREE_ASM_WRITTEN (block) = 0;
5132
5133 /* Record this block. */
5134 if (vector)
5135 vector[n_blocks] = block;
5136
5137 ++n_blocks;
5138
5139 /* Record the subblocks, and their subblocks... */
5140 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5141 vector ? vector + n_blocks : 0);
5142 block = BLOCK_CHAIN (block);
5143 }
5144
5145 return n_blocks;
5146 }
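/* The vector is filled in depth-first preorder: each block is followed
   by all of its subblocks, recursively, and then by the next block on
   its chain.  For a tree A -> (B -> (C), D) the order is A, B, C, D.  */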
5147 \f
5148 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5149 and initialize static variables for generating RTL for the statements
5150 of the function. */
5151
5152 void
5153 init_function_start (subr, filename, line)
5154 tree subr;
5155 char *filename;
5156 int line;
5157 {
5158 init_stmt_for_function ();
5159
5160 cse_not_expected = ! optimize;
5161
5162 /* Caller save not needed yet. */
5163 caller_save_needed = 0;
5164
5165 /* No stack slots have been made yet. */
5166 stack_slot_list = 0;
5167
5168 /* There is no stack slot for handling nonlocal gotos. */
5169 nonlocal_goto_handler_slot = 0;
5170 nonlocal_goto_stack_level = 0;
5171
5172 /* No labels have been declared for nonlocal use. */
5173 nonlocal_labels = 0;
5174
5175 /* No function calls so far in this function. */
5176 function_call_count = 0;
5177
5178 /* No parm regs have been allocated.
5179 (This is important for output_inline_function.) */
5180 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5181
5182 /* Initialize the RTL mechanism. */
5183 init_emit ();
5184
5185 /* Initialize the queue of pending postincrements and postdecrements,
5186 and some other info in expr.c. */
5187 init_expr ();
5188
5189 /* We haven't done register allocation yet. */
5190 reg_renumber = 0;
5191
5192 init_const_rtx_hash_table ();
5193
5194 current_function_name = (*decl_printable_name) (subr, 2);
5195
5196 /* Nonzero if this is a nested function that uses a static chain. */
5197
5198 current_function_needs_context
5199 = (decl_function_context (current_function_decl) != 0
5200 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5201
5202 /* Set if a call to setjmp is seen. */
5203 current_function_calls_setjmp = 0;
5204
5205 /* Set if a call to longjmp is seen. */
5206 current_function_calls_longjmp = 0;
5207
5208 current_function_calls_alloca = 0;
5209 current_function_has_nonlocal_label = 0;
5210 current_function_has_nonlocal_goto = 0;
5211 current_function_contains_functions = 0;
5212 current_function_is_thunk = 0;
5213
5214 current_function_returns_pcc_struct = 0;
5215 current_function_returns_struct = 0;
5216 current_function_epilogue_delay_list = 0;
5217 current_function_uses_const_pool = 0;
5218 current_function_uses_pic_offset_table = 0;
5219
5220 /* We have not yet needed to make a label to jump to for tail-recursion. */
5221 tail_recursion_label = 0;
5222
5223 /* We haven't had a need to make a save area for ap yet. */
5224
5225 arg_pointer_save_area = 0;
5226
5227 /* No stack slots allocated yet. */
5228 frame_offset = 0;
5229
5230 /* No SAVE_EXPRs in this function yet. */
5231 save_expr_regs = 0;
5232
5233 /* No RTL_EXPRs in this function yet. */
5234 rtl_expr_chain = 0;
5235
5236 /* Set up to allocate temporaries. */
5237 init_temp_slots ();
5238
5239 /* Within function body, compute a type's size as soon as it is laid out. */
5240 immediate_size_expand++;
5241
5242 /* We haven't made any trampolines for this function yet. */
5243 trampoline_list = 0;
5244
5245 init_pending_stack_adjust ();
5246 inhibit_defer_pop = 0;
5247
5248 current_function_outgoing_args_size = 0;
5249
5250 /* Prevent ever trying to delete the first instruction of a function.
5251 Also tell final how to output a linenum before the function prologue. */
5252 emit_line_note (filename, line);
5253
5254 /* Make sure first insn is a note even if we don't want linenums.
5255 This makes sure the first insn will never be deleted.
5256 Also, final expects a note to appear there. */
5257 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5258
5259 /* Set flags used by final.c. */
5260 if (aggregate_value_p (DECL_RESULT (subr)))
5261 {
5262 #ifdef PCC_STATIC_STRUCT_RETURN
5263 current_function_returns_pcc_struct = 1;
5264 #endif
5265 current_function_returns_struct = 1;
5266 }
5267
5268 /* Warn if the function's return value is an aggregate type,
5269 regardless of which calling convention we are using for it. */
5270 if (warn_aggregate_return
5271 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5272 warning ("function returns an aggregate");
5273
5274 current_function_returns_pointer
5275 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5276
5277 /* Indicate that we need to distinguish between the return value of the
5278 present function and the return value of a function being called. */
5279 rtx_equal_function_value_matters = 1;
5280
5281 /* Indicate that we have not instantiated virtual registers yet. */
5282 virtuals_instantiated = 0;
5283
5284 /* Indicate we have no need of a frame pointer yet. */
5285 frame_pointer_needed = 0;
5286
5287 /* By default, assume the function is neither varargs nor stdarg. */
5288 current_function_varargs = 0;
5289 current_function_stdarg = 0;
5290 }
5291
5292 /* Indicate that the current function uses extra args
5293 not explicitly mentioned in the argument list in any fashion. */
5294
5295 void
5296 mark_varargs ()
5297 {
5298 current_function_varargs = 1;
5299 }
5300
5301 /* Expand a call to __main at the beginning of a possible main function. */
5302
5303 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5304 #undef HAS_INIT_SECTION
5305 #define HAS_INIT_SECTION
5306 #endif
5307
5308 void
5309 expand_main_function ()
5310 {
5311 #if !defined (HAS_INIT_SECTION)
5312 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5313 VOIDmode, 0);
5314 #endif /* not HAS_INIT_SECTION */
5315 }
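/* On targets that lack an initialization section, the library call
   emitted above is what runs global constructors: compiled code for
   main begins, in effect (the exact form varies by target), with

       call __main

   before the body, and __main in libgcc arranges for the constructors
   to be run.  */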
5316 \f
5317 extern struct obstack permanent_obstack;
5318
5319 /* Start the RTL for a new function, and set variables used for
5320 emitting RTL.
5321 SUBR is the FUNCTION_DECL node.
5322 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5323 the function's parameters, which must be run at any return statement. */
5324
5325 void
5326 expand_function_start (subr, parms_have_cleanups)
5327 tree subr;
5328 int parms_have_cleanups;
5329 {
5330 register int i;
5331 tree tem;
5332 rtx last_ptr;
5333
5334 /* Make sure volatile mem refs aren't considered
5335 valid operands of arithmetic insns. */
5336 init_recog_no_volatile ();
5337
5338 /* If function gets a static chain arg, store it in the stack frame.
5339 Do this first, so it gets the first stack slot offset. */
5340 if (current_function_needs_context)
5341 {
5342 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5343
5344 /* Delay copying static chain if it is not a register to avoid
5345 conflicts with regs used for parameters. */
5346 if (! SMALL_REGISTER_CLASSES
5347 || GET_CODE (static_chain_incoming_rtx) == REG)
5348 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5349 }
5350
5351 /* If the parameters of this function need cleaning up, get a label
5352 for the beginning of the code which executes those cleanups. This must
5353 be done before doing anything with return_label. */
5354 if (parms_have_cleanups)
5355 cleanup_label = gen_label_rtx ();
5356 else
5357 cleanup_label = 0;
5358
5359 /* Make the label for return statements to jump to, if this machine
5360 does not have a one-instruction return and uses an epilogue,
5361 or if it returns a structure, or if it has parm cleanups. */
5362 #ifdef HAVE_return
5363 if (cleanup_label == 0 && HAVE_return
5364 && ! current_function_returns_pcc_struct
5365 && ! (current_function_returns_struct && ! optimize))
5366 return_label = 0;
5367 else
5368 return_label = gen_label_rtx ();
5369 #else
5370 return_label = gen_label_rtx ();
5371 #endif
5372
5373 /* Initialize rtx used to return the value. */
5374 /* Do this before assign_parms so that we copy the struct value address
5375 before any library calls that assign parms might generate. */
5376
5377 /* Decide whether to return the value in memory or in a register. */
5378 if (aggregate_value_p (DECL_RESULT (subr)))
5379 {
5380 /* Returning something that won't go in a register. */
5381 register rtx value_address = 0;
5382
5383 #ifdef PCC_STATIC_STRUCT_RETURN
5384 if (current_function_returns_pcc_struct)
5385 {
5386 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5387 value_address = assemble_static_space (size);
5388 }
5389 else
5390 #endif
5391 {
5392 /* Expect to be passed the address of a place to store the value.
5393 If it is passed as an argument, assign_parms will take care of
5394 it. */
5395 if (struct_value_incoming_rtx)
5396 {
5397 value_address = gen_reg_rtx (Pmode);
5398 emit_move_insn (value_address, struct_value_incoming_rtx);
5399 }
5400 }
5401 if (value_address)
5402 {
5403 DECL_RTL (DECL_RESULT (subr))
5404 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5405 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5406 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5407 }
5408 }
5409 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5410 /* If return mode is void, this decl rtl should not be used. */
5411 DECL_RTL (DECL_RESULT (subr)) = 0;
5412 else if (parms_have_cleanups)
5413 {
5414 /* If function will end with cleanup code for parms,
5415 compute the return values into a pseudo reg,
5416 which we will copy into the true return register
5417 after the cleanups are done. */
5418
5419 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5420
5421 #ifdef PROMOTE_FUNCTION_RETURN
5422 tree type = TREE_TYPE (DECL_RESULT (subr));
5423 int unsignedp = TREE_UNSIGNED (type);
5424
5425 mode = promote_mode (type, mode, &unsignedp, 1);
5426 #endif
5427
5428 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5429 }
5430 else
5431 /* Scalar, returned in a register. */
5432 {
5433 #ifdef FUNCTION_OUTGOING_VALUE
5434 DECL_RTL (DECL_RESULT (subr))
5435 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5436 #else
5437 DECL_RTL (DECL_RESULT (subr))
5438 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5439 #endif
5440
5441 /* Mark this reg as the function's return value. */
5442 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5443 {
5444 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5445 /* Needed because we may need to move this to memory
5446 in case it's a named return value whose address is taken. */
5447 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5448 }
5449 }
5450
5451 /* Initialize rtx for parameters and local variables.
5452 In some cases this requires emitting insns. */
5453
5454 assign_parms (subr, 0);
5455
5456 /* Copy the static chain now if it wasn't a register. The delay is to
5457 avoid conflicts with the parameter passing registers. */
5458
5459 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5460 if (GET_CODE (static_chain_incoming_rtx) != REG)
5461 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5462
5463 /* The following was moved from init_function_start.
5464 The move is supposed to make sdb output more accurate. */
5465 /* Indicate the beginning of the function body,
5466 as opposed to parm setup. */
5467 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5468
5469 /* If doing stupid allocation, mark parms as born here. */
5470
5471 if (GET_CODE (get_last_insn ()) != NOTE)
5472 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5473 parm_birth_insn = get_last_insn ();
5474
5475 if (obey_regdecls)
5476 {
5477 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5478 use_variable (regno_reg_rtx[i]);
5479
5480 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5481 use_variable (current_function_internal_arg_pointer);
5482 }
5483
5484 context_display = 0;
5485 if (current_function_needs_context)
5486 {
5487 /* Fetch static chain values for containing functions. */
5488 tem = decl_function_context (current_function_decl);
5489 /* If not doing stupid register allocation, copy the static chain
5490 pointer into a pseudo. If we have small register classes, copy
5491 the value from memory if static_chain_incoming_rtx is a REG. If
5492 we do stupid register allocation, we use the stack address
5493 generated above. */
5494 if (tem && ! obey_regdecls)
5495 {
5496 /* If the static chain originally came in a register, put it back
5497 there, then move it out in the next insn. The reason for
5498 this peculiar code is to satisfy function integration. */
5499 if (SMALL_REGISTER_CLASSES
5500 && GET_CODE (static_chain_incoming_rtx) == REG)
5501 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5502 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5503 }
5504
5505 while (tem)
5506 {
5507 tree rtlexp = make_node (RTL_EXPR);
5508
5509 RTL_EXPR_RTL (rtlexp) = last_ptr;
5510 context_display = tree_cons (tem, rtlexp, context_display);
5511 tem = decl_function_context (tem);
5512 if (tem == 0)
5513 break;
5514 /* Chain through stack frames, assuming the pointer to the next lexical
5515    frame is found at the place we always store it. */
5516 #ifdef FRAME_GROWS_DOWNWARD
5517 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5518 #endif
5519 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5520 memory_address (Pmode, last_ptr)));
5521
5522 /* If we are not optimizing, ensure that we know that this
5523 piece of context is live over the entire function. */
5524 if (! optimize)
5525 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5526 save_expr_regs);
5527 }
5528 }
5529
5530 /* After the display initializations is where the tail-recursion label
5531 should go, if we end up needing one. Ensure we have a NOTE here
5532 since some things (like trampolines) get placed before this. */
5533 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5534
5535 /* Evaluate now the sizes of any types declared among the arguments. */
5536 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5537 {
5538 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5539 EXPAND_MEMORY_USE_BAD);
5540 /* Flush the queue in case this parameter declaration has
5541 side-effects. */
5542 emit_queue ();
5543 }
5544
5545 /* Make sure there is a line number after the function entry setup code. */
5546 force_next_line_note ();
5547 }
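/* As an example of the aggregate-return path above: for a function
   declared `struct S f ()' whose result will not fit in a register,
   the caller normally passes the address of a return slot (found in
   struct_value_incoming_rtx), and DECL_RTL of the result becomes a MEM
   at that address; with PCC_STATIC_STRUCT_RETURN the value instead goes
   into static storage allocated by assemble_static_space.  */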
5548 \f
5549 /* Generate RTL for the end of the current function.
5550 FILENAME and LINE are the current position in the source file.
5551
5552 It is up to language-specific callers to do cleanups for parameters;
5553 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5554
5555 void
5556 expand_function_end (filename, line, end_bindings)
5557 char *filename;
5558 int line;
5559 int end_bindings;
5560 {
5561 register int i;
5562 tree link;
5563
5564 #ifdef TRAMPOLINE_TEMPLATE
5565 static rtx initial_trampoline;
5566 #endif
5567
5568 #ifdef NON_SAVING_SETJMP
5569 /* Don't put any variables in registers if we call setjmp
5570 on a machine that fails to restore the registers. */
5571 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5572 {
5573 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5574 setjmp_protect (DECL_INITIAL (current_function_decl));
5575
5576 setjmp_protect_args ();
5577 }
5578 #endif
5579
5580 /* Save the argument pointer if a save area was made for it. */
5581 if (arg_pointer_save_area)
5582 {
5583 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5584 emit_insn_before (x, tail_recursion_reentry);
5585 }
5586
5587 /* Initialize any trampolines required by this function. */
5588 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5589 {
5590 tree function = TREE_PURPOSE (link);
5591 rtx context = lookup_static_chain (function);
5592 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5593 rtx blktramp;
5594 rtx seq;
5595
5596 #ifdef TRAMPOLINE_TEMPLATE
5597 /* First make sure this compilation has a template for
5598 initializing trampolines. */
5599 if (initial_trampoline == 0)
5600 {
5601 end_temporary_allocation ();
5602 initial_trampoline
5603 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5604 resume_temporary_allocation ();
5605 }
5606 #endif
5607
5608 /* Generate insns to initialize the trampoline. */
5609 start_sequence ();
5610 tramp = round_trampoline_addr (XEXP (tramp, 0));
5611 #ifdef TRAMPOLINE_TEMPLATE
5612 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5613 emit_block_move (blktramp, initial_trampoline,
5614 GEN_INT (TRAMPOLINE_SIZE),
5615 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5616 #endif
5617 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5618 seq = get_insns ();
5619 end_sequence ();
5620
5621 /* Put those insns at entry to the containing function (this one). */
5622 emit_insns_before (seq, tail_recursion_reentry);
5623 }
5624
5625 /* If we are doing stack checking and this function makes calls,
5626 do a stack probe at the start of the function to ensure we have enough
5627 space for another stack frame. */
5628 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5629 {
5630 rtx insn, seq;
5631
5632 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5633 if (GET_CODE (insn) == CALL_INSN)
5634 {
5635 start_sequence ();
5636 probe_stack_range (STACK_CHECK_PROTECT,
5637 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5638 seq = get_insns ();
5639 end_sequence ();
5640 emit_insns_before (seq, tail_recursion_reentry);
5641 break;
5642 }
5643 }
5644
5645 /* Warn about unused parms if extra warnings were specified. */
5646 if (warn_unused && extra_warnings)
5647 {
5648 tree decl;
5649
5650 for (decl = DECL_ARGUMENTS (current_function_decl);
5651 decl; decl = TREE_CHAIN (decl))
5652 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5653 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5654 warning_with_decl (decl, "unused parameter `%s'");
5655 }
5656
5657 /* Delete handlers for nonlocal gotos if nothing uses them. */
5658 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5659 delete_handlers ();
5660
5661 /* End any sequences that failed to be closed due to syntax errors. */
5662 while (in_sequence_p ())
5663 end_sequence ();
5664
5665 /* Outside function body, can't compute type's actual size
5666 until next function's body starts. */
5667 immediate_size_expand--;
5668
5669 /* If doing stupid register allocation,
5670 mark register parms as dying here. */
5671
5672 if (obey_regdecls)
5673 {
5674 rtx tem;
5675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5676 use_variable (regno_reg_rtx[i]);
5677
5678 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5679
5680 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5681 {
5682 use_variable (XEXP (tem, 0));
5683 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5684 }
5685
5686 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5687 use_variable (current_function_internal_arg_pointer);
5688 }
5689
5690 clear_pending_stack_adjust ();
5691 do_pending_stack_adjust ();
5692
5693 /* Mark the end of the function body.
5694 If control reaches this insn, the function can drop through
5695 without returning a value. */
5696 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5697
5698 /* Must mark the last line number note in the function, so that the test
5699 coverage code can avoid counting the last line twice. This just tells
5700 the code to ignore the immediately following line note, since there
5701 already exists a copy of this note somewhere above. This line number
5702 note is still needed for debugging though, so we can't delete it. */
5703 if (flag_test_coverage)
5704 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5705
5706 /* Output a linenumber for the end of the function.
5707 SDB depends on this. */
5708 emit_line_note_force (filename, line);
5709
5710 /* Output the label for the actual return from the function,
5711 if one is expected. This happens either because a function epilogue
5712 is used instead of a return instruction, or because a return was done
5713 with a goto in order to run local cleanups, or because of pcc-style
5714 structure returning. */
5715
5716 if (return_label)
5717 emit_label (return_label);
5718
5719 /* C++ uses this. */
5720 if (end_bindings)
5721 expand_end_bindings (0, 0, 0);
5722
5723 /* Now handle any leftover exception regions that may have been
5724 created for the parameters. */
5725 {
5726 rtx last = get_last_insn ();
5727 rtx label;
5728
5729 expand_leftover_cleanups ();
5730
5731 /* If the above emitted any code, make sure we jump around it. */
5732 if (last != get_last_insn ())
5733 {
5734 label = gen_label_rtx ();
5735 last = emit_jump_insn_after (gen_jump (label), last);
5736 last = emit_barrier_after (last);
5737 emit_label (label);
5738 }
5739 }
5740
5741 /* If we had calls to alloca, and this machine needs
5742 an accurate stack pointer to exit the function,
5743 insert some code to save and restore the stack pointer. */
5744 #ifdef EXIT_IGNORE_STACK
5745 if (! EXIT_IGNORE_STACK)
5746 #endif
5747 if (current_function_calls_alloca)
5748 {
5749 rtx tem = 0;
5750
5751 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5752 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5753 }
5754
5755 /* If scalar return value was computed in a pseudo-reg,
5756 copy that to the hard return register. */
5757 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5758 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5759 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5760 >= FIRST_PSEUDO_REGISTER))
5761 {
5762 rtx real_decl_result;
5763
5764 #ifdef FUNCTION_OUTGOING_VALUE
5765 real_decl_result
5766 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5767 current_function_decl);
5768 #else
5769 real_decl_result
5770 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5771 current_function_decl);
5772 #endif
5773 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5774 /* If this is a BLKmode structure being returned in registers, then use
5775 the mode computed in expand_return. */
5776 if (GET_MODE (real_decl_result) == BLKmode)
5777 PUT_MODE (real_decl_result,
5778 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5779 emit_move_insn (real_decl_result,
5780 DECL_RTL (DECL_RESULT (current_function_decl)));
5781 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5782
5783 /* The delay slot scheduler assumes that current_function_return_rtx
5784 holds the hard register containing the return value, not a temporary
5785 pseudo. */
5786 current_function_return_rtx = real_decl_result;
5787 }
5788
5789 /* If returning a structure, arrange to return the address of the value
5790 in a place where debuggers expect to find it.
5791
5792 If returning a structure PCC style,
5793 the caller also depends on this value.
5794 And current_function_returns_pcc_struct is not necessarily set. */
5795 if (current_function_returns_struct
5796 || current_function_returns_pcc_struct)
5797 {
5798 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5799 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5800 #ifdef FUNCTION_OUTGOING_VALUE
5801 rtx outgoing
5802 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5803 current_function_decl);
5804 #else
5805 rtx outgoing
5806 = FUNCTION_VALUE (build_pointer_type (type),
5807 current_function_decl);
5808 #endif
5809
5810 /* Mark this as a function return value so integrate will delete the
5811 assignment and USE below when inlining this function. */
5812 REG_FUNCTION_VALUE_P (outgoing) = 1;
5813
5814 emit_move_insn (outgoing, value_address);
5815 use_variable (outgoing);
5816 }
5817
5818 /* Output a return insn if we are using one.
5819 Otherwise, let the rtl chain end here, to drop through
5820 into the epilogue. */
5821
5822 #ifdef HAVE_return
5823 if (HAVE_return)
5824 {
5825 emit_jump_insn (gen_return ());
5826 emit_barrier ();
5827 }
5828 #endif
5829
5830 /* Fix up any gotos that jumped out to the outermost
5831 binding level of the function.
5832 Must follow emitting RETURN_LABEL. */
5833
5834 /* If you have any cleanups to do at this point,
5835 and they need to create temporary variables,
5836 then you will lose. */
5837 expand_fixups (get_insns ());
5838 }
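/* When expand_function_end finishes, the tail of the insn chain of a
   typical scalar-returning function therefore looks roughly like:

     NOTE_INSN_FUNCTION_END
     (the return label, if one was needed)
     copy of the result pseudo into the hard return register
     USE of the hard return register
     return insn and barrier, if the target HAVE_return

   with the conditions above governing which pieces are present.  */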
5839 \f
5840 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5841
5842 static int *prologue;
5843 static int *epilogue;
5844
5845 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5846 or a single insn). */
5847
5848 static int *
5849 record_insns (insns)
5850 rtx insns;
5851 {
5852 int *vec;
5853
5854 if (GET_CODE (insns) == SEQUENCE)
5855 {
5856 int len = XVECLEN (insns, 0);
5857 vec = (int *) oballoc ((len + 1) * sizeof (int));
5858 vec[len] = 0;
5859 while (--len >= 0)
5860 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5861 }
5862 else
5863 {
5864 vec = (int *) oballoc (2 * sizeof (int));
5865 vec[0] = INSN_UID (insns);
5866 vec[1] = 0;
5867 }
5868 return vec;
5869 }
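/* For instance, given a SEQUENCE of three insns with UIDs 11, 12 and
   13, the vector returned is {11, 12, 13, 0}; the trailing zero is the
   terminator that contains, below, scans for.  */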
5870
5871 /* Determine how many of the INSN_UIDs in VEC occur in INSN. */
5872
5873 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5874 static int
5875 contains (insn, vec)
5876 rtx insn;
5877 int *vec;
5878 {
5879 register int i, j;
5880
5881 if (GET_CODE (insn) == INSN
5882 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5883 {
5884 int count = 0;
5885 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5886 for (j = 0; vec[j]; j++)
5887 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5888 count++;
5889 return count;
5890 }
5891 else
5892 {
5893 for (j = 0; vec[j]; j++)
5894 if (INSN_UID (insn) == vec[j])
5895 return 1;
5896 }
5897 return 0;
5898 }
5899 #endif /* HAVE_prologue || HAVE_epilogue */
5900
5901 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5902 this into place with notes indicating where the prologue ends and where
5903 the epilogue begins. Update the basic block information when possible. */
5904
5905 void
5906 thread_prologue_and_epilogue_insns (f)
5907 rtx f;
5908 {
5909 #ifdef HAVE_prologue
5910 if (HAVE_prologue)
5911 {
5912 rtx head, seq;
5913
5914 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5915 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5916 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5917 seq = gen_prologue ();
5918 head = emit_insn_after (seq, f);
5919
5920 /* Include the new prologue insns in the first block. Ignore them
5921 if they form a basic block unto themselves. */
5922 if (basic_block_head && n_basic_blocks
5923 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5924 basic_block_head[0] = NEXT_INSN (f);
5925
5926 /* Retain a map of the prologue insns. */
5927 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5928 }
5929 else
5930 #endif
5931 prologue = 0;
5932
5933 #ifdef HAVE_epilogue
5934 if (HAVE_epilogue)
5935 {
5936 rtx insn = get_last_insn ();
5937 rtx prev = prev_nonnote_insn (insn);
5938
5939 /* If we end with a BARRIER, we don't need an epilogue. */
5940 if (! (prev && GET_CODE (prev) == BARRIER))
5941 {
5942 rtx tail, seq, tem;
5943 rtx first_use = 0;
5944 rtx last_use = 0;
5945
5946 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5947 epilogue insns, the USE insns at the end of a function,
5948 the jump insn that returns, and then a BARRIER. */
5949
5950 /* Move the USE insns at the end of a function onto a list. */
5951 while (prev
5952 && GET_CODE (prev) == INSN
5953 && GET_CODE (PATTERN (prev)) == USE)
5954 {
5955 tem = prev;
5956 prev = prev_nonnote_insn (prev);
5957
5958 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5959 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5960 if (first_use)
5961 {
5962 NEXT_INSN (tem) = first_use;
5963 PREV_INSN (first_use) = tem;
5964 }
5965 first_use = tem;
5966 if (!last_use)
5967 last_use = tem;
5968 }
5969
5970 emit_barrier_after (insn);
5971
5972 seq = gen_epilogue ();
5973 tail = emit_jump_insn_after (seq, insn);
5974
5975 /* Insert the USE insns immediately before the return insn, which
5976 must be the first instruction before the final barrier. */
5977 if (first_use)
5978 {
5979 tem = prev_nonnote_insn (get_last_insn ());
5980 NEXT_INSN (PREV_INSN (tem)) = first_use;
5981 PREV_INSN (first_use) = PREV_INSN (tem);
5982 PREV_INSN (tem) = last_use;
5983 NEXT_INSN (last_use) = tem;
5984 }
5985
5986 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5987
5988 /* Include the new epilogue insns in the last block. Ignore
5989 them if they form a basic block unto themselves. */
5990 if (basic_block_end && n_basic_blocks
5991 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5992 basic_block_end[n_basic_blocks - 1] = tail;
5993
5994 /* Retain a map of the epilogue insns. */
5995 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5996 return;
5997 }
5998 }
5999 #endif
6000 epilogue = 0;
6001 }
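/* After threading, the chain of a function that has both a prologue and
   an epilogue is bracketed roughly as:

     NOTE_INSN_DELETED          (first insn, never deleted)
     prologue insns
     NOTE_INSN_PROLOGUE_END
     ... function body ...
     NOTE_INSN_EPILOGUE_BEG
     epilogue insns             (any USE insns precede the return jump)
     return jump
     BARRIER  */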
6002
6003 /* Reposition the prologue-end and epilogue-begin notes after instruction
6004 scheduling and delayed branch scheduling. */
6005
6006 void
6007 reposition_prologue_and_epilogue_notes (f)
6008 rtx f;
6009 {
6010 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6011 /* Reposition the prologue and epilogue notes. */
6012 if (n_basic_blocks)
6013 {
6014 rtx next, prev;
6015 int len;
6016
6017 if (prologue)
6018 {
6019 register rtx insn, note = 0;
6020
6021 /* Scan from the beginning until we reach the last prologue insn.
6022 We apparently can't depend on basic_block_{head,end} after
6023 reorg has run. */
6024 for (len = 0; prologue[len]; len++)
6025 ;
6026 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6027 {
6028 if (GET_CODE (insn) == NOTE)
6029 {
6030 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6031 note = insn;
6032 }
6033 else if ((len -= contains (insn, prologue)) == 0)
6034 {
6035 /* Find the prologue-end note if we haven't already, and
6036 move it to just after the last prologue insn. */
6037 if (note == 0)
6038 {
6039 for (note = insn; (note = NEXT_INSN (note));)
6040 if (GET_CODE (note) == NOTE
6041 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6042 break;
6043 }
6044 next = NEXT_INSN (note);
6045 prev = PREV_INSN (note);
6046 if (prev)
6047 NEXT_INSN (prev) = next;
6048 if (next)
6049 PREV_INSN (next) = prev;
6050 add_insn_after (note, insn);
6051 }
6052 }
6053 }
6054
6055 if (epilogue)
6056 {
6057 register rtx insn, note = 0;
6058
6059 /* Scan from the end until we reach the first epilogue insn.
6060 We apparently can't depend on basic_block_{head,end} after
6061 reorg has run. */
6062 for (len = 0; epilogue[len]; len++)
6063 ;
6064 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6065 {
6066 if (GET_CODE (insn) == NOTE)
6067 {
6068 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6069 note = insn;
6070 }
6071 else if ((len -= contains (insn, epilogue)) == 0)
6072 {
6073 /* Find the epilogue-begin note if we haven't already, and
6074 move it to just before the first epilogue insn. */
6075 if (note == 0)
6076 {
6077 for (note = insn; (note = PREV_INSN (note));)
6078 if (GET_CODE (note) == NOTE
6079 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6080 break;
6081 }
6082 next = NEXT_INSN (note);
6083 prev = PREV_INSN (note);
6084 if (prev)
6085 NEXT_INSN (prev) = next;
6086 if (next)
6087 PREV_INSN (next) = prev;
6088 add_insn_after (note, PREV_INSN (insn));
6089 }
6090 }
6091 }
6092 }
6093 #endif /* HAVE_prologue or HAVE_epilogue */
6094 }