gcc/function.c
1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
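
/* A minimal sketch of the two allocation entry points described above,
   assuming a caller somewhere in the RTL expander; the names `decl' and
   `home' are only illustrative, hence the block is guarded out.  */
#if 0
  /* Give DECL a frame slot sized and aligned according to its mode.  */
  rtx home = assign_stack_local (DECL_MODE (decl),
                                 GET_MODE_SIZE (DECL_MODE (decl)), 0);

  /* Or, if DECL had been given a pseudo register and its address is
     taken later, move it into the frame retroactively.  */
  put_var_into_stack (decl);
#endif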
40
41 #include "config.h"
42 #include "system.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "toplev.h"
59 #include "hash.h"
60 #include "ggc.h"
61 #include "tm_p.h"
62
63 #ifndef TRAMPOLINE_ALIGNMENT
64 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
65 #endif
66
67 #ifndef LOCAL_ALIGNMENT
68 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
69 #endif
70
71 #if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
72 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
73 #endif
74
75 /* Some systems use __main in a way incompatible with its use in gcc, in these
76 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
77 give the same symbol without quotes for an alternative entry point. You
78 must define both, or neither. */
79 #ifndef NAME__MAIN
80 #define NAME__MAIN "__main"
81 #define SYMBOL__MAIN __main
82 #endif
83
84 /* Round a value down to the largest multiple of the required alignment
85 that does not exceed it. Avoid using division in case the value is
86 negative. Assume the alignment is a power of two. */
87 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
88
89 /* Similar, but round up to the smallest such multiple that is not
90 less than the value. */
91 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
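
/* For example, with ALIGN == 8 (a power of two):
     FLOOR_ROUND (37, 8) == 32    FLOOR_ROUND (-5, 8) == -8
     CEIL_ROUND (37, 8) == 40     CEIL_ROUND (32, 8) == 32
   The masking form gives these results even for negative values, where
   division might round toward zero instead.  */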
92
93 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
94 during rtl generation. If they are different register numbers, this is
95 always true. It may also be true if
96 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
97 generation. See fix_lexical_addr for details. */
98
99 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
100 #define NEED_SEPARATE_AP
101 #endif
102
103 /* Nonzero if function being compiled doesn't contain any calls
104 (ignoring the prologue and epilogue). This is set prior to
105 local register allocation and is valid for the remaining
106 compiler passes. */
107 int current_function_is_leaf;
108
109 /* Nonzero if function being compiled doesn't modify the stack pointer
110 (ignoring the prologue and epilogue). This is only valid after
111 life_analysis has run. */
112 int current_function_sp_is_unchanging;
113
114 /* Nonzero if the function being compiled is a leaf function which only
115 uses leaf registers. This is valid after reload (specifically after
116 sched2) and is useful only if the port defines LEAF_REGISTERS. */
117 int current_function_uses_only_leaf_regs;
118
119 /* Nonzero once virtual register instantiation has been done.
120 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
121 static int virtuals_instantiated;
122
123 /* These variables hold pointers to functions to
124 save and restore machine-specific data,
125 in push_function_context and pop_function_context. */
126 void (*init_machine_status) PARAMS ((struct function *));
127 void (*save_machine_status) PARAMS ((struct function *));
128 void (*restore_machine_status) PARAMS ((struct function *));
129 void (*mark_machine_status) PARAMS ((struct function *));
130 void (*free_machine_status) PARAMS ((struct function *));
131
132 /* Likewise, but for language-specific data. */
133 void (*init_lang_status) PARAMS ((struct function *));
134 void (*save_lang_status) PARAMS ((struct function *));
135 void (*restore_lang_status) PARAMS ((struct function *));
136 void (*mark_lang_status) PARAMS ((struct function *));
137 void (*free_lang_status) PARAMS ((struct function *));
138
139 /* The FUNCTION_DECL for an inline function currently being expanded. */
140 tree inline_function_decl;
141
142 /* The currently compiled function. */
143 struct function *cfun = 0;
144
145 /* Global list of all compiled functions. */
146 struct function *all_functions = 0;
147
148 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
149 static int *prologue;
150 static int *epilogue;
151 \f
152 /* In order to evaluate some expressions, such as function calls returning
153 structures in memory, we need to temporarily allocate stack locations.
154 We record each allocated temporary in the following structure.
155
156 Associated with each temporary slot is a nesting level. When we pop up
157 one level, all temporaries associated with the previous level are freed.
158 Normally, all temporaries are freed after the execution of the statement
159 in which they were created. However, if we are inside a ({...}) grouping,
160 the result may be in a temporary and hence must be preserved. If the
161 result could be in a temporary, we preserve it if we can determine which
162 one it is in. If we cannot determine which temporary may contain the
163 result, all temporaries are preserved. A temporary is preserved by
164 pretending it was allocated at the previous nesting level.
165
166 Automatic variables are also assigned temporary slots, at the nesting
167 level where they are defined. They are marked as "kept" so that
168 free_temp_slots will not free them. */
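
/* A sketch of the intended calling pattern, assuming a statement
   expander as the caller; illustrative only, hence guarded out.  */
#if 0
  push_temp_slots ();
  rtx temp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
  /* ... emit RTL that stores into and reads back from TEMP ... */
  free_temp_slots ();           /* End of the statement.  */
  pop_temp_slots ();            /* End of the nesting level.  */
#endif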
169
170 struct temp_slot
171 {
172 /* Points to next temporary slot. */
173 struct temp_slot *next;
174 /* The rtx used to reference the slot. */
175 rtx slot;
176 /* The rtx used to represent the address if not the address of the
177 slot above. May be an EXPR_LIST if multiple addresses exist. */
178 rtx address;
179 /* The alignment (in bits) of the slot. */
180 int align;
181 /* The size, in units, of the slot. */
182 HOST_WIDE_INT size;
183 /* The alias set for the slot. If the alias set is zero, we don't
184 know anything about the alias set of the slot. We must only
185 reuse a slot if it is assigned an object of the same alias set.
186 Otherwise, the rest of the compiler may assume that the new use
187 of the slot cannot alias the old use of the slot, which is
188 false. If the slot has alias set zero, then we can't reuse the
189 slot at all, since we have no idea what alias set may have been
190 imposed on the memory. For example, if the stack slot is the
191 call frame for an inlined function, we have no idea what alias
192 sets will be assigned to various pieces of the call frame. */
193 int alias_set;
194 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
195 tree rtl_expr;
196 /* Non-zero if this temporary is currently in use. */
197 char in_use;
198 /* Non-zero if this temporary has its address taken. */
199 char addr_taken;
200 /* Nesting level at which this slot is being used. */
201 int level;
202 /* Non-zero if this should survive a call to free_temp_slots. */
203 int keep;
204 /* The offset of the slot from the frame_pointer, including extra space
205 for alignment. This info is for combine_temp_slots. */
206 HOST_WIDE_INT base_offset;
207 /* The size of the slot, including extra space for alignment. This
208 info is for combine_temp_slots. */
209 HOST_WIDE_INT full_size;
210 };
211 \f
212 /* This structure is used to record MEMs or pseudos used to replace VAR, any
213 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
214 maintain this list in case two operands of an insn were required to match;
215 in that case we must ensure we use the same replacement. */
216
217 struct fixup_replacement
218 {
219 rtx old;
220 rtx new;
221 struct fixup_replacement *next;
222 };
223
224 struct insns_for_mem_entry {
225 /* The KEY in HE will be a MEM. */
226 struct hash_entry he;
227 /* These are the INSNS which reference the MEM. */
228 rtx insns;
229 };
230
231 /* Forward declarations. */
232
233 static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
234 int, struct function *));
235 static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
236 HOST_WIDE_INT, int, tree));
237 static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
238 static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
239 enum machine_mode, enum machine_mode,
240 int, int, int, struct hash_table *));
241 static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
242 struct hash_table *));
243 static struct fixup_replacement
244 *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
245 static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
246 rtx, int, struct hash_table *));
247 static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
248 struct fixup_replacement **));
249 static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
250 static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
251 static rtx fixup_stack_1 PARAMS ((rtx, rtx));
252 static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
253 static void instantiate_decls PARAMS ((tree, int));
254 static void instantiate_decls_1 PARAMS ((tree, int));
255 static void instantiate_decl PARAMS ((rtx, int, int));
256 static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
257 static void delete_handlers PARAMS ((void));
258 static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
259 struct args_size *));
260 #ifndef ARGS_GROW_DOWNWARD
261 static void pad_below PARAMS ((struct args_size *, enum machine_mode,
262 tree));
263 #endif
264 #ifdef ARGS_GROW_DOWNWARD
265 static tree round_down PARAMS ((tree, int));
266 #endif
267 static rtx round_trampoline_addr PARAMS ((rtx));
268 static tree blocks_nreverse PARAMS ((tree));
269 static int all_blocks PARAMS ((tree, tree *));
270 /* We always define `record_insns' even if it's not used so that we
271 can always export `prologue_epilogue_contains'. */
272 static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
273 static int contains PARAMS ((rtx, int *));
274 static void emit_return_into_block PARAMS ((basic_block));
275 static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
276 static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
277 struct hash_table *));
278 static int is_addressof PARAMS ((rtx *, void *));
279 static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
280 struct hash_table *,
281 hash_table_key));
282 static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
283 static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
284 static int insns_for_mem_walk PARAMS ((rtx *, void *));
285 static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
286 static void mark_temp_slot PARAMS ((struct temp_slot *));
287 static void mark_function_status PARAMS ((struct function *));
288 static void mark_function_chain PARAMS ((void *));
289 static void prepare_function_start PARAMS ((void));
290 static void do_clobber_return_reg PARAMS ((rtx, void *));
291 static void do_use_return_reg PARAMS ((rtx, void *));
292 \f
293 /* Pointer to chain of `struct function' for containing functions. */
294 struct function *outer_function_chain;
295
296 /* Given a function decl for a containing function,
297 return the `struct function' for it. */
298
299 struct function *
300 find_function_data (decl)
301 tree decl;
302 {
303 struct function *p;
304
305 for (p = outer_function_chain; p; p = p->next)
306 if (p->decl == decl)
307 return p;
308
309 abort ();
310 }
311
312 /* Save the current context for compilation of a nested function.
313 This is called from language-specific code. The caller should use
314 the save_lang_status callback to save any language-specific state,
315 since this function knows only about language-independent
316 variables. */
317
318 void
319 push_function_context_to (context)
320 tree context;
321 {
322 struct function *p, *context_data;
323
324 if (context)
325 {
326 context_data = (context == current_function_decl
327 ? cfun
328 : find_function_data (context));
329 context_data->contains_functions = 1;
330 }
331
332 if (cfun == 0)
333 init_dummy_function_start ();
334 p = cfun;
335
336 p->next = outer_function_chain;
337 outer_function_chain = p;
338 p->fixup_var_refs_queue = 0;
339
340 save_tree_status (p);
341 if (save_lang_status)
342 (*save_lang_status) (p);
343 if (save_machine_status)
344 (*save_machine_status) (p);
345
346 cfun = 0;
347 }
348
349 void
350 push_function_context ()
351 {
352 push_function_context_to (current_function_decl);
353 }
354
355 /* Restore the last saved context, at the end of a nested function.
356 This function is called from language-specific code. */
357
358 void
359 pop_function_context_from (context)
360 tree context ATTRIBUTE_UNUSED;
361 {
362 struct function *p = outer_function_chain;
363 struct var_refs_queue *queue;
364 struct var_refs_queue *next;
365
366 cfun = p;
367 outer_function_chain = p->next;
368
369 current_function_decl = p->decl;
370 reg_renumber = 0;
371
372 restore_tree_status (p);
373 restore_emit_status (p);
374
375 if (restore_machine_status)
376 (*restore_machine_status) (p);
377 if (restore_lang_status)
378 (*restore_lang_status) (p);
379
380 /* Finish doing put_var_into_stack for any of our variables
381 which became addressable during the nested function. */
382 for (queue = p->fixup_var_refs_queue; queue; queue = next)
383 {
384 next = queue->next;
385 fixup_var_refs (queue->modified, queue->promoted_mode,
386 queue->unsignedp, 0);
387 free (queue);
388 }
389 p->fixup_var_refs_queue = 0;
390
391 /* Reset variables that have known state during rtx generation. */
392 rtx_equal_function_value_matters = 1;
393 virtuals_instantiated = 0;
394 }
395
396 void
397 pop_function_context ()
398 {
399 pop_function_context_from (current_function_decl);
400 }
401
402 /* Clear out all parts of the state in F that can safely be discarded
403 after the function has been parsed, but not compiled, to let
404 garbage collection reclaim the memory. */
405
406 void
407 free_after_parsing (f)
408 struct function *f;
409 {
410 /* f->expr->forced_labels is used by code generation. */
411 /* f->emit->regno_reg_rtx is used by code generation. */
412 /* f->varasm is used by code generation. */
413 /* f->eh->eh_return_stub_label is used by code generation. */
414
415 if (free_lang_status)
416 (*free_lang_status) (f);
417 free_stmt_status (f);
418 }
419
420 /* Clear out all parts of the state in F that can safely be discarded
421 after the function has been compiled, to let garbage collection
422 reclaim the memory. */
423
424 void
425 free_after_compilation (f)
426 struct function *f;
427 {
428 free_eh_status (f);
429 free_expr_status (f);
430 free_emit_status (f);
431 free_varasm_status (f);
432
433 if (free_machine_status)
434 (*free_machine_status) (f);
435
436 if (f->x_parm_reg_stack_loc)
437 free (f->x_parm_reg_stack_loc);
438
439 f->arg_offset_rtx = NULL;
440 f->return_rtx = NULL;
441 f->internal_arg_pointer = NULL;
442 f->x_nonlocal_labels = NULL;
443 f->x_nonlocal_goto_handler_slots = NULL;
444 f->x_nonlocal_goto_handler_labels = NULL;
445 f->x_nonlocal_goto_stack_level = NULL;
446 f->x_cleanup_label = NULL;
447 f->x_return_label = NULL;
448 f->x_save_expr_regs = NULL;
449 f->x_stack_slot_list = NULL;
450 f->x_rtl_expr_chain = NULL;
451 f->x_tail_recursion_label = NULL;
452 f->x_tail_recursion_reentry = NULL;
453 f->x_arg_pointer_save_area = NULL;
454 f->x_context_display = NULL;
455 f->x_trampoline_list = NULL;
456 f->x_parm_birth_insn = NULL;
457 f->x_last_parm_insn = NULL;
458 f->x_parm_reg_stack_loc = NULL;
459 f->x_temp_slots = NULL;
460 f->fixup_var_refs_queue = NULL;
461 f->original_arg_vector = NULL;
462 f->original_decl_initial = NULL;
463 f->inl_last_parm_insn = NULL;
464 f->epilogue_delay_list = NULL;
465 }
466
467 \f
468 /* Allocate fixed slots in the stack frame of the current function. */
469
470 /* Return size needed for stack frame based on slots so far allocated in
471 function F.
472 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
473 the caller may have to do that. */
474
475 HOST_WIDE_INT
476 get_func_frame_size (f)
477 struct function *f;
478 {
479 #ifdef FRAME_GROWS_DOWNWARD
480 return -f->x_frame_offset;
481 #else
482 return f->x_frame_offset;
483 #endif
484 }
485
486 /* Return size needed for stack frame based on slots so far allocated.
487 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
488 the caller may have to do that. */
489 HOST_WIDE_INT
490 get_frame_size ()
491 {
492 return get_func_frame_size (cfun);
493 }
494
495 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
496 with machine mode MODE.
497
498 ALIGN controls the amount of alignment for the address of the slot:
499 0 means according to MODE,
500 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
501 positive specifies alignment boundary in bits.
502
503 We do not round to stack_boundary here.
504
505 FUNCTION specifies the function to allocate in. */
506
507 static rtx
508 assign_stack_local_1 (mode, size, align, function)
509 enum machine_mode mode;
510 HOST_WIDE_INT size;
511 int align;
512 struct function *function;
513 {
514 register rtx x, addr;
515 int bigend_correction = 0;
516 int alignment;
517
518 /* Allocate in the memory associated with the function in whose frame
519 we are assigning. */
520 if (function != cfun)
521 push_obstacks (function->function_obstack,
522 function->function_maybepermanent_obstack);
523
524 if (align == 0)
525 {
526 tree type;
527
528 alignment = GET_MODE_ALIGNMENT (mode);
529 if (mode == BLKmode)
530 alignment = BIGGEST_ALIGNMENT;
531
532 /* Allow the target to (possibly) increase the alignment of this
533 stack slot. */
534 type = type_for_mode (mode, 0);
535 if (type)
536 alignment = LOCAL_ALIGNMENT (type, alignment);
537
538 alignment /= BITS_PER_UNIT;
539 }
540 else if (align == -1)
541 {
542 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
543 size = CEIL_ROUND (size, alignment);
544 }
545 else
546 alignment = align / BITS_PER_UNIT;
547
548 #ifdef FRAME_GROWS_DOWNWARD
549 function->x_frame_offset -= size;
550 #endif
551
552 /* Ignore alignment requests larger than the preferred stack boundary. */
553 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
554 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
555
556 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
557 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
558
559 /* Round frame offset to that alignment.
560 We must be careful here, since FRAME_OFFSET might be negative and
561 division with a negative dividend isn't as well defined as we might
562 like. So we instead assume that ALIGNMENT is a power of two and
563 use logical operations which are unambiguous. */
564 #ifdef FRAME_GROWS_DOWNWARD
565 function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset, alignment);
566 #else
567 function->x_frame_offset = CEIL_ROUND (function->x_frame_offset, alignment);
568 #endif
569
570 /* On a big-endian machine, if we are allocating more space than we will use,
571 use the least significant bytes of those that are allocated. */
572 if (BYTES_BIG_ENDIAN && mode != BLKmode)
573 bigend_correction = size - GET_MODE_SIZE (mode);
574
575 /* If we have already instantiated virtual registers, return the actual
576 address relative to the frame pointer. */
577 if (function == cfun && virtuals_instantiated)
578 addr = plus_constant (frame_pointer_rtx,
579 (frame_offset + bigend_correction
580 + STARTING_FRAME_OFFSET));
581 else
582 addr = plus_constant (virtual_stack_vars_rtx,
583 function->x_frame_offset + bigend_correction);
584
585 #ifndef FRAME_GROWS_DOWNWARD
586 function->x_frame_offset += size;
587 #endif
588
589 x = gen_rtx_MEM (mode, addr);
590
591 function->x_stack_slot_list
592 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
593
594 if (function != cfun)
595 pop_obstacks ();
596
597 return x;
598 }
599
600 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
601 current function. */
602 rtx
603 assign_stack_local (mode, size, align)
604 enum machine_mode mode;
605 HOST_WIDE_INT size;
606 int align;
607 {
608 return assign_stack_local_1 (mode, size, align, cfun);
609 }
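
/* A usage sketch for the ALIGN argument, with hypothetical callers;
   illustrative only, hence guarded out.  */
#if 0
  /* A word-sized slot aligned as SImode requires (ALIGN == 0).  */
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

  /* A 32-byte buffer rounded up to and aligned to BIGGEST_ALIGNMENT
     (ALIGN == -1).  */
  rtx b = assign_stack_local (BLKmode, 32, -1);

  /* A 16-byte buffer with an explicit 64-bit alignment request.  */
  rtx c = assign_stack_local (BLKmode, 16, 64);
#endif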
610 \f
611 /* Allocate a temporary stack slot and record it for possible later
612 reuse.
613
614 MODE is the machine mode to be given to the returned rtx.
615
616 SIZE is the size in units of the space required. We do no rounding here
617 since assign_stack_local will do any required rounding.
618
619 KEEP is 1 if this slot is to be retained after a call to
620 free_temp_slots. Automatic variables for a block are allocated
621 with this flag. KEEP is 2 if we allocate a longer term temporary,
622 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
623 if we are to allocate something at an inner level to be treated as
624 a variable in the block (e.g., a SAVE_EXPR).
625
626 TYPE is the type that will be used for the stack slot. */
627
628 static rtx
629 assign_stack_temp_for_type (mode, size, keep, type)
630 enum machine_mode mode;
631 HOST_WIDE_INT size;
632 int keep;
633 tree type;
634 {
635 int align;
636 int alias_set;
637 struct temp_slot *p, *best_p = 0;
638
639 /* If SIZE is -1 it means that somebody tried to allocate a temporary
640 of a variable size. */
641 if (size == -1)
642 abort ();
643
644 /* If we know the alias set for the memory that will be used, use
645 it. If there's no TYPE, then we don't know anything about the
646 alias set for the memory. */
647 if (type)
648 alias_set = get_alias_set (type);
649 else
650 alias_set = 0;
651
652 align = GET_MODE_ALIGNMENT (mode);
653 if (mode == BLKmode)
654 align = BIGGEST_ALIGNMENT;
655
656 if (! type)
657 type = type_for_mode (mode, 0);
658 if (type)
659 align = LOCAL_ALIGNMENT (type, align);
660
661 /* Try to find an available, already-allocated temporary of the proper
662 mode which meets the size and alignment requirements. Choose the
663 smallest one with the closest alignment. */
664 for (p = temp_slots; p; p = p->next)
665 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
666 && ! p->in_use
667 && (!flag_strict_aliasing
668 || (alias_set && p->alias_set == alias_set))
669 && (best_p == 0 || best_p->size > p->size
670 || (best_p->size == p->size && best_p->align > p->align)))
671 {
672 if (p->align == align && p->size == size)
673 {
674 best_p = 0;
675 break;
676 }
677 best_p = p;
678 }
679
680 /* Make our best, if any, the one to use. */
681 if (best_p)
682 {
683 /* If there are enough aligned bytes left over, make them into a new
684 temp_slot so that the extra bytes don't get wasted. Do this only
685 for BLKmode slots, so that we can be sure of the alignment. */
686 if (GET_MODE (best_p->slot) == BLKmode
687 /* We can't split slots if -fstrict-aliasing because the
688 information about the alias set for the new slot will be
689 lost. */
690 && !flag_strict_aliasing)
691 {
692 int alignment = best_p->align / BITS_PER_UNIT;
693 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
694
695 if (best_p->size - rounded_size >= alignment)
696 {
697 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
698 p->in_use = p->addr_taken = 0;
699 p->size = best_p->size - rounded_size;
700 p->base_offset = best_p->base_offset + rounded_size;
701 p->full_size = best_p->full_size - rounded_size;
702 p->slot = gen_rtx_MEM (BLKmode,
703 plus_constant (XEXP (best_p->slot, 0),
704 rounded_size));
705 p->align = best_p->align;
706 p->address = 0;
707 p->rtl_expr = 0;
708 p->next = temp_slots;
709 temp_slots = p;
710
711 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
712 stack_slot_list);
713
714 best_p->size = rounded_size;
715 best_p->full_size = rounded_size;
716 }
717 }
718
719 p = best_p;
720 }
721
722 /* If we still didn't find one, make a new temporary. */
723 if (p == 0)
724 {
725 HOST_WIDE_INT frame_offset_old = frame_offset;
726
727 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
728
729 /* We are passing an explicit alignment request to assign_stack_local.
730 One side effect of that is assign_stack_local will not round SIZE
731 to ensure the frame offset remains suitably aligned.
732
733 So for requests which depended on the rounding of SIZE, we go ahead
734 and round it now. We also make sure ALIGNMENT is at least
735 BIGGEST_ALIGNMENT. */
736 if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
737 abort ();
738 p->slot = assign_stack_local (mode,
739 (mode == BLKmode
740 ? CEIL_ROUND (size, align / BITS_PER_UNIT)
741 : size),
742 align);
743
744 p->align = align;
745 p->alias_set = alias_set;
746
747 /* The following slot size computation is necessary because we don't
748 know the actual size of the temporary slot until assign_stack_local
749 has performed all the frame alignment and size rounding for the
750 requested temporary. Note that extra space added for alignment
751 can be either above or below this stack slot depending on which
752 way the frame grows. We include the extra space if and only if it
753 is above this slot. */
754 #ifdef FRAME_GROWS_DOWNWARD
755 p->size = frame_offset_old - frame_offset;
756 #else
757 p->size = size;
758 #endif
759
760 /* Now define the fields used by combine_temp_slots. */
761 #ifdef FRAME_GROWS_DOWNWARD
762 p->base_offset = frame_offset;
763 p->full_size = frame_offset_old - frame_offset;
764 #else
765 p->base_offset = frame_offset_old;
766 p->full_size = frame_offset - frame_offset_old;
767 #endif
768 p->address = 0;
769 p->next = temp_slots;
770 temp_slots = p;
771 }
772
773 p->in_use = 1;
774 p->addr_taken = 0;
775 p->rtl_expr = seq_rtl_expr;
776
777 if (keep == 2)
778 {
779 p->level = target_temp_slot_level;
780 p->keep = 0;
781 }
782 else if (keep == 3)
783 {
784 p->level = var_temp_slot_level;
785 p->keep = 0;
786 }
787 else
788 {
789 p->level = temp_slot_level;
790 p->keep = keep;
791 }
792
793 /* We may be reusing an old slot, so clear any MEM flags that may have been
794 set from before. */
795 RTX_UNCHANGING_P (p->slot) = 0;
796 MEM_IN_STRUCT_P (p->slot) = 0;
797 MEM_SCALAR_P (p->slot) = 0;
798 MEM_ALIAS_SET (p->slot) = 0;
799 return p->slot;
800 }
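
/* For example, a 16-byte BLKmode request can reuse a free 64-byte
   BLKmode slot: the request is rounded to the slot's alignment, the
   first 16 bytes stay with the reused slot, and the remaining aligned
   bytes are split off into a new free temp_slot (unless
   -fstrict-aliasing is in effect, in which case no split is done).  */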
801
802 /* Allocate a temporary stack slot and record it for possible later
803 reuse. The first three arguments are the same as in the preceding function. */
804
805 rtx
806 assign_stack_temp (mode, size, keep)
807 enum machine_mode mode;
808 HOST_WIDE_INT size;
809 int keep;
810 {
811 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
812 }
813 \f
814 /* Assign a temporary of given TYPE.
815 KEEP is as for assign_stack_temp.
816 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
817 it is 0 if a register is OK.
818 DONT_PROMOTE is 1 if we should not promote values in registers
819 to wider modes. */
820
821 rtx
822 assign_temp (type, keep, memory_required, dont_promote)
823 tree type;
824 int keep;
825 int memory_required;
826 int dont_promote ATTRIBUTE_UNUSED;
827 {
828 enum machine_mode mode = TYPE_MODE (type);
829 #ifndef PROMOTE_FOR_CALL_ONLY
830 int unsignedp = TREE_UNSIGNED (type);
831 #endif
832
833 if (mode == BLKmode || memory_required)
834 {
835 HOST_WIDE_INT size = int_size_in_bytes (type);
836 rtx tmp;
837
838 /* Unfortunately, we don't yet know how to allocate variable-sized
839 temporaries. However, sometimes we have a fixed upper limit on
840 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
841 instead. This is the case for Chill variable-sized strings. */
842 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
843 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
844 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
845 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
846
847 tmp = assign_stack_temp_for_type (mode, size, keep, type);
848 MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
849 return tmp;
850 }
851
852 #ifndef PROMOTE_FOR_CALL_ONLY
853 if (! dont_promote)
854 mode = promote_mode (type, mode, &unsignedp, 0);
855 #endif
856
857 return gen_reg_rtx (mode);
858 }
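
/* A sketch of assign_temp for an aggregate value, assuming TYPE is some
   RECORD_TYPE; illustrative only, hence guarded out.  */
#if 0
  /* MEMORY_REQUIRED == 1 forces a stack slot even if the aggregate
     would fit in a register; KEEP == 1 keeps it for the enclosing
     block, and DONT_PROMOTE suppresses mode promotion.  */
  rtx ret_slot = assign_temp (type, 1, 1, 1);
#endif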
859 \f
860 /* Combine temporary stack slots which are adjacent on the stack.
861
862 This allows for better use of already allocated stack space. This is only
863 done for BLKmode slots because we can be sure that we won't have alignment
864 problems in this case. */
865
866 void
867 combine_temp_slots ()
868 {
869 struct temp_slot *p, *q;
870 struct temp_slot *prev_p, *prev_q;
871 int num_slots;
872
873 /* We can't combine slots, because the information about which slot
874 is in which alias set will be lost. */
875 if (flag_strict_aliasing)
876 return;
877
878 /* If there are a lot of temp slots, don't do anything unless
879 high levels of optimization are enabled. */
880 if (! flag_expensive_optimizations)
881 for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
882 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
883 return;
884
885 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
886 {
887 int delete_p = 0;
888
889 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
890 for (q = p->next, prev_q = p; q; q = prev_q->next)
891 {
892 int delete_q = 0;
893 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
894 {
895 if (p->base_offset + p->full_size == q->base_offset)
896 {
897 /* Q comes after P; combine Q into P. */
898 p->size += q->size;
899 p->full_size += q->full_size;
900 delete_q = 1;
901 }
902 else if (q->base_offset + q->full_size == p->base_offset)
903 {
904 /* P comes after Q; combine P into Q. */
905 q->size += p->size;
906 q->full_size += p->full_size;
907 delete_p = 1;
908 break;
909 }
910 }
911 /* Either delete Q or advance past it. */
912 if (delete_q)
913 prev_q->next = q->next;
914 else
915 prev_q = q;
916 }
917 /* Either delete P or advance past it. */
918 if (delete_p)
919 {
920 if (prev_p)
921 prev_p->next = p->next;
922 else
923 temp_slots = p->next;
924 }
925 else
926 prev_p = p;
927 }
928 }
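
/* For example, two free BLKmode slots with (base_offset, full_size) of
   (16, 8) and (24, 8) are adjacent, so they are merged into one 16-byte
   slot at offset 16 that a later, larger request can reuse.  */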
929 \f
930 /* Find the temp slot corresponding to the object at address X. */
931
932 static struct temp_slot *
933 find_temp_slot_from_address (x)
934 rtx x;
935 {
936 struct temp_slot *p;
937 rtx next;
938
939 for (p = temp_slots; p; p = p->next)
940 {
941 if (! p->in_use)
942 continue;
943
944 else if (XEXP (p->slot, 0) == x
945 || p->address == x
946 || (GET_CODE (x) == PLUS
947 && XEXP (x, 0) == virtual_stack_vars_rtx
948 && GET_CODE (XEXP (x, 1)) == CONST_INT
949 && INTVAL (XEXP (x, 1)) >= p->base_offset
950 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
951 return p;
952
953 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
954 for (next = p->address; next; next = XEXP (next, 1))
955 if (XEXP (next, 0) == x)
956 return p;
957 }
958
959 /* If we have a sum involving a register, see if it points to a temp
960 slot. */
961 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
962 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
963 return p;
964 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
965 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
966 return p;
967
968 return 0;
969 }
970
971 /* Indicate that NEW is an alternate way of referring to the temp slot
972 that previously was known by OLD. */
973
974 void
975 update_temp_slot_address (old, new)
976 rtx old, new;
977 {
978 struct temp_slot *p;
979
980 if (rtx_equal_p (old, new))
981 return;
982
983 p = find_temp_slot_from_address (old);
984
985 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
986 is a register, one operand of the PLUS may be a temporary
987 location that NEW points into, so recurse on each operand.
988 Otherwise, if both OLD and NEW are PLUS expressions with an
989 operand in common, try a recursive call on the other operands. */
990 if (p == 0)
991 {
992 if (GET_CODE (old) != PLUS)
993 return;
994
995 if (GET_CODE (new) == REG)
996 {
997 update_temp_slot_address (XEXP (old, 0), new);
998 update_temp_slot_address (XEXP (old, 1), new);
999 return;
1000 }
1001 else if (GET_CODE (new) != PLUS)
1002 return;
1003
1004 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1005 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1006 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1007 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1008 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1009 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1010 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1011 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1012
1013 return;
1014 }
1015
1016 /* Otherwise add an alias for the temp's address. */
1017 else if (p->address == 0)
1018 p->address = new;
1019 else
1020 {
1021 if (GET_CODE (p->address) != EXPR_LIST)
1022 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1023
1024 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1025 }
1026 }
1027
1028 /* If X could be a reference to a temporary slot, mark the fact that its
1029 address was taken. */
1030
1031 void
1032 mark_temp_addr_taken (x)
1033 rtx x;
1034 {
1035 struct temp_slot *p;
1036
1037 if (x == 0)
1038 return;
1039
1040 /* If X is not in memory or is at a constant address, it cannot be in
1041 a temporary slot. */
1042 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1043 return;
1044
1045 p = find_temp_slot_from_address (XEXP (x, 0));
1046 if (p != 0)
1047 p->addr_taken = 1;
1048 }
1049
1050 /* If X could be a reference to a temporary slot, mark that slot as
1051 belonging to the level one higher than the current level. If X
1052 matched one of our slots, just mark that one. Otherwise, we can't
1053 easily predict which it is, so upgrade all of them. Kept slots
1054 need not be touched.
1055
1056 This is called when an ({...}) construct occurs and a statement
1057 returns a value in memory. */
1058
1059 void
1060 preserve_temp_slots (x)
1061 rtx x;
1062 {
1063 struct temp_slot *p = 0;
1064
1065 /* If there is no result, we still might have some objects whose addresses
1066 were taken, so we need to make sure they stay around. */
1067 if (x == 0)
1068 {
1069 for (p = temp_slots; p; p = p->next)
1070 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1071 p->level--;
1072
1073 return;
1074 }
1075
1076 /* If X is a register that is being used as a pointer, see if we have
1077 a temporary slot we know it points to. To be consistent with
1078 the code below, we really should preserve all non-kept slots
1079 if we can't find a match, but that seems to be much too costly. */
1080 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1081 p = find_temp_slot_from_address (x);
1082
1083 /* If X is not in memory or is at a constant address, it cannot be in
1084 a temporary slot, but it can contain something whose address was
1085 taken. */
1086 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1087 {
1088 for (p = temp_slots; p; p = p->next)
1089 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1090 p->level--;
1091
1092 return;
1093 }
1094
1095 /* First see if we can find a match. */
1096 if (p == 0)
1097 p = find_temp_slot_from_address (XEXP (x, 0));
1098
1099 if (p != 0)
1100 {
1101 /* Move everything at our level whose address was taken to our new
1102 level in case we used its address. */
1103 struct temp_slot *q;
1104
1105 if (p->level == temp_slot_level)
1106 {
1107 for (q = temp_slots; q; q = q->next)
1108 if (q != p && q->addr_taken && q->level == p->level)
1109 q->level--;
1110
1111 p->level--;
1112 p->addr_taken = 0;
1113 }
1114 return;
1115 }
1116
1117 /* Otherwise, preserve all non-kept slots at this level. */
1118 for (p = temp_slots; p; p = p->next)
1119 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1120 p->level--;
1121 }
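
/* For example, in a statement expression such as

     x = ({ struct big b = f (); b; });

   the slot holding B carries the value of the whole construct, so it is
   promoted to the enclosing level here rather than freed with the
   statement that created it.  */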
1122
1123 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1124 with that RTL_EXPR, promote it into a temporary slot at the present
1125 level so it will not be freed when we free slots made in the
1126 RTL_EXPR. */
1127
1128 void
1129 preserve_rtl_expr_result (x)
1130 rtx x;
1131 {
1132 struct temp_slot *p;
1133
1134 /* If X is not in memory or is at a constant address, it cannot be in
1135 a temporary slot. */
1136 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1137 return;
1138
1139 /* If we can find a match, move it to our level unless it is already at
1140 an upper level. */
1141 p = find_temp_slot_from_address (XEXP (x, 0));
1142 if (p != 0)
1143 {
1144 p->level = MIN (p->level, temp_slot_level);
1145 p->rtl_expr = 0;
1146 }
1147
1148 return;
1149 }
1150
1151 /* Free all temporaries used so far. This is normally called at the end
1152 of generating code for a statement. Don't free any temporaries
1153 currently in use for an RTL_EXPR that hasn't yet been emitted.
1154 We could eventually do better, since such a temporary can be reused while
1155 generating the same RTL_EXPR, but this is complex and probably not
1156 worthwhile. */
1157
1158 void
1159 free_temp_slots ()
1160 {
1161 struct temp_slot *p;
1162
1163 for (p = temp_slots; p; p = p->next)
1164 if (p->in_use && p->level == temp_slot_level && ! p->keep
1165 && p->rtl_expr == 0)
1166 p->in_use = 0;
1167
1168 combine_temp_slots ();
1169 }
1170
1171 /* Free all temporary slots used in T, an RTL_EXPR node. */
1172
1173 void
1174 free_temps_for_rtl_expr (t)
1175 tree t;
1176 {
1177 struct temp_slot *p;
1178
1179 for (p = temp_slots; p; p = p->next)
1180 if (p->rtl_expr == t)
1181 p->in_use = 0;
1182
1183 combine_temp_slots ();
1184 }
1185
1186 /* Mark all temporaries ever allocated in this function as not suitable
1187 for reuse until the current level is exited. */
1188
1189 void
1190 mark_all_temps_used ()
1191 {
1192 struct temp_slot *p;
1193
1194 for (p = temp_slots; p; p = p->next)
1195 {
1196 p->in_use = p->keep = 1;
1197 p->level = MIN (p->level, temp_slot_level);
1198 }
1199 }
1200
1201 /* Push deeper into the nesting level for stack temporaries. */
1202
1203 void
1204 push_temp_slots ()
1205 {
1206 temp_slot_level++;
1207 }
1208
1209 /* Likewise, but save the new level as the place to allocate variables
1210 for blocks. */
1211
1212 #if 0
1213 void
1214 push_temp_slots_for_block ()
1215 {
1216 push_temp_slots ();
1217
1218 var_temp_slot_level = temp_slot_level;
1219 }
1220
1221 /* Likewise, but save the new level as the place to allocate temporaries
1222 for TARGET_EXPRs. */
1223
1224 void
1225 push_temp_slots_for_target ()
1226 {
1227 push_temp_slots ();
1228
1229 target_temp_slot_level = temp_slot_level;
1230 }
1231
1232 /* Set and get the value of target_temp_slot_level. The only
1233 permitted use of these functions is to save and restore this value. */
1234
1235 int
1236 get_target_temp_slot_level ()
1237 {
1238 return target_temp_slot_level;
1239 }
1240
1241 void
1242 set_target_temp_slot_level (level)
1243 int level;
1244 {
1245 target_temp_slot_level = level;
1246 }
1247 #endif
1248
1249 /* Pop a temporary nesting level. All slots in use in the current level
1250 are freed. */
1251
1252 void
1253 pop_temp_slots ()
1254 {
1255 struct temp_slot *p;
1256
1257 for (p = temp_slots; p; p = p->next)
1258 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1259 p->in_use = 0;
1260
1261 combine_temp_slots ();
1262
1263 temp_slot_level--;
1264 }
1265
1266 /* Initialize temporary slots. */
1267
1268 void
1269 init_temp_slots ()
1270 {
1271 /* We have not allocated any temporaries yet. */
1272 temp_slots = 0;
1273 temp_slot_level = 0;
1274 var_temp_slot_level = 0;
1275 target_temp_slot_level = 0;
1276 }
1277 \f
1278 /* Retroactively move an auto variable from a register to a stack slot.
1279 This is done when an address-reference to the variable is seen. */
1280
1281 void
1282 put_var_into_stack (decl)
1283 tree decl;
1284 {
1285 register rtx reg;
1286 enum machine_mode promoted_mode, decl_mode;
1287 struct function *function = 0;
1288 tree context;
1289 int can_use_addressof;
1290
1291 context = decl_function_context (decl);
1292
1293 /* Get the current rtl used for this object and its original mode. */
1294 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1295
1296 /* No need to do anything if decl has no rtx yet
1297 since in that case caller is setting TREE_ADDRESSABLE
1298 and a stack slot will be assigned when the rtl is made. */
1299 if (reg == 0)
1300 return;
1301
1302 /* Get the declared mode for this object. */
1303 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1304 : DECL_MODE (decl));
1305 /* Get the mode it's actually stored in. */
1306 promoted_mode = GET_MODE (reg);
1307
1308 /* If this variable comes from an outer function,
1309 find that function's saved context. */
1310 if (context != current_function_decl && context != inline_function_decl)
1311 for (function = outer_function_chain; function; function = function->next)
1312 if (function->decl == context)
1313 break;
1314
1315 /* If this is a variable-size object with a pseudo to address it,
1316 put that pseudo into the stack, if the var is nonlocal. */
1317 if (DECL_NONLOCAL (decl)
1318 && GET_CODE (reg) == MEM
1319 && GET_CODE (XEXP (reg, 0)) == REG
1320 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1321 {
1322 reg = XEXP (reg, 0);
1323 decl_mode = promoted_mode = GET_MODE (reg);
1324 }
1325
1326 can_use_addressof
1327 = (function == 0
1328 && optimize > 0
1329 /* FIXME make it work for promoted modes too */
1330 && decl_mode == promoted_mode
1331 #ifdef NON_SAVING_SETJMP
1332 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1333 #endif
1334 );
1335
1336 /* If we can't use ADDRESSOF, make sure we see through one we already
1337 generated. */
1338 if (! can_use_addressof && GET_CODE (reg) == MEM
1339 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1340 reg = XEXP (XEXP (reg, 0), 0);
1341
1342 /* Now we should have a value that resides in one or more pseudo regs. */
1343
1344 if (GET_CODE (reg) == REG)
1345 {
1346 /* If this variable lives in the current function and we don't need
1347 to put things in the stack for the sake of setjmp, try to keep it
1348 in a register until we know we actually need the address. */
1349 if (can_use_addressof)
1350 gen_mem_addressof (reg, decl);
1351 else
1352 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1353 promoted_mode, decl_mode,
1354 TREE_SIDE_EFFECTS (decl), 0,
1355 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1356 0);
1357 }
1358 else if (GET_CODE (reg) == CONCAT)
1359 {
1360 /* A CONCAT contains two pseudos; put them both in the stack.
1361 We do it so they end up consecutive. */
1362 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1363 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1364 #ifdef FRAME_GROWS_DOWNWARD
1365 /* Since part 0 should have a lower address, do it second. */
1366 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1367 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1368 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1369 0);
1370 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1371 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1372 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1373 0);
1374 #else
1375 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1376 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1377 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1378 0);
1379 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1380 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1381 TREE_USED (decl) || DECL_INITIAL (decl) != 0,
1382 0);
1383 #endif
1384
1385 /* Change the CONCAT into a combined MEM for both parts. */
1386 PUT_CODE (reg, MEM);
1387 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1388 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1389
1390 /* The two parts are in memory order already.
1391 Use the lower part's address as ours. */
1392 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1393 /* Prevent sharing of rtl that might lose. */
1394 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1395 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1396 }
1397 else
1398 return;
1399
1400 if (current_function_check_memory_usage)
1401 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1402 XEXP (reg, 0), Pmode,
1403 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1404 TYPE_MODE (sizetype),
1405 GEN_INT (MEMORY_USE_RW),
1406 TYPE_MODE (integer_type_node));
1407 }
1408
1409 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1410 into the stack frame of FUNCTION (0 means the current function).
1411 DECL_MODE is the machine mode of the user-level data type.
1412 PROMOTED_MODE is the machine mode of the register.
1413 VOLATILE_P is nonzero if this is for a "volatile" decl.
1414 USED_P is nonzero if this reg might have already been used in an insn. */
1415
1416 static void
1417 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1418 original_regno, used_p, ht)
1419 struct function *function;
1420 rtx reg;
1421 tree type;
1422 enum machine_mode promoted_mode, decl_mode;
1423 int volatile_p;
1424 int original_regno;
1425 int used_p;
1426 struct hash_table *ht;
1427 {
1428 struct function *func = function ? function : cfun;
1429 rtx new = 0;
1430 int regno = original_regno;
1431
1432 if (regno == 0)
1433 regno = REGNO (reg);
1434
1435 if (regno < func->x_max_parm_reg)
1436 new = func->x_parm_reg_stack_loc[regno];
1437 if (new == 0)
1438 new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
1439
1440 PUT_CODE (reg, MEM);
1441 PUT_MODE (reg, decl_mode);
1442 XEXP (reg, 0) = XEXP (new, 0);
1443 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1444 MEM_VOLATILE_P (reg) = volatile_p;
1445
1446 /* If this is a memory ref that contains aggregate components,
1447 mark it as such for cse and loop optimize. If we are reusing a
1448 previously generated stack slot, then we need to copy the bit in
1449 case it was set for other reasons. For instance, it is set for
1450 __builtin_va_alist. */
1451 MEM_SET_IN_STRUCT_P (reg,
1452 AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1453 MEM_ALIAS_SET (reg) = get_alias_set (type);
1454
1455 /* Now make sure that all refs to the variable, previously made
1456 when it was a register, are fixed up to be valid again. */
1457
1458 if (used_p && function != 0)
1459 {
1460 struct var_refs_queue *temp;
1461
1462 temp
1463 = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
1464 temp->modified = reg;
1465 temp->promoted_mode = promoted_mode;
1466 temp->unsignedp = TREE_UNSIGNED (type);
1467 temp->next = function->fixup_var_refs_queue;
1468 function->fixup_var_refs_queue = temp;
1469 }
1470 else if (used_p)
1471 /* Variable is local; fix it up now. */
1472 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type), ht);
1473 }
1474 \f
1475 static void
1476 fixup_var_refs (var, promoted_mode, unsignedp, ht)
1477 rtx var;
1478 enum machine_mode promoted_mode;
1479 int unsignedp;
1480 struct hash_table *ht;
1481 {
1482 tree pending;
1483 rtx first_insn = get_insns ();
1484 struct sequence_stack *stack = seq_stack;
1485 tree rtl_exps = rtl_expr_chain;
1486
1487 /* Must scan all insns for stack-refs that exceed the limit. */
1488 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
1489 stack == 0, ht);
1490 /* If there's a hash table, it must record all uses of VAR. */
1491 if (ht)
1492 return;
1493
1494 /* Scan all pending sequences too. */
1495 for (; stack; stack = stack->next)
1496 {
1497 push_to_sequence (stack->first);
1498 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1499 stack->first, stack->next != 0, 0);
1500 /* Update remembered end of sequence
1501 in case we added an insn at the end. */
1502 stack->last = get_last_insn ();
1503 end_sequence ();
1504 }
1505
1506 /* Scan all waiting RTL_EXPRs too. */
1507 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1508 {
1509 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1510 if (seq != const0_rtx && seq != 0)
1511 {
1512 push_to_sequence (seq);
1513 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0,
1514 0);
1515 end_sequence ();
1516 }
1517 }
1518
1519 /* Scan the catch clauses for exception handling too. */
1520 push_to_sequence (catch_clauses);
1521 fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
1522 0, 0);
1523 end_sequence ();
1524 }
1525 \f
1526 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
1527 and X is some part of an insn. Return a struct fixup_replacement whose OLD
1528 value is equal to X. Allocate a new structure if no such entry exists. */
1529
1530 static struct fixup_replacement *
1531 find_fixup_replacement (replacements, x)
1532 struct fixup_replacement **replacements;
1533 rtx x;
1534 {
1535 struct fixup_replacement *p;
1536
1537 /* See if we have already replaced this. */
1538 for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1539 ;
1540
1541 if (p == 0)
1542 {
1543 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1544 p->old = x;
1545 p->new = 0;
1546 p->next = *replacements;
1547 *replacements = p;
1548 }
1549
1550 return p;
1551 }
1552
1553 /* Scan the insn-chain starting with INSN for refs to VAR
1554 and fix them up. TOPLEVEL is nonzero if this chain is the
1555 main chain of insns for the current function. */
1556
1557 static void
1558 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel, ht)
1559 rtx var;
1560 enum machine_mode promoted_mode;
1561 int unsignedp;
1562 rtx insn;
1563 int toplevel;
1564 struct hash_table *ht;
1565 {
1566 rtx call_dest = 0;
1567 rtx insn_list = NULL_RTX;
1568
1569 /* If we already know which INSNs reference VAR there's no need
1570 to walk the entire instruction chain. */
1571 if (ht)
1572 {
1573 insn_list = ((struct insns_for_mem_entry *)
1574 hash_lookup (ht, var, /*create=*/0, /*copy=*/0))->insns;
1575 insn = insn_list ? XEXP (insn_list, 0) : NULL_RTX;
1576 insn_list = XEXP (insn_list, 1);
1577 }
1578
1579 while (insn)
1580 {
1581 rtx next = NEXT_INSN (insn);
1582 rtx set, prev, prev_set;
1583 rtx note;
1584
1585 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1586 {
1587 /* Remember the notes in case we delete the insn. */
1588 note = REG_NOTES (insn);
1589
1590 /* If this is a CLOBBER of VAR, delete it.
1591
1592 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1593 and REG_RETVAL notes too. */
1594 if (GET_CODE (PATTERN (insn)) == CLOBBER
1595 && (XEXP (PATTERN (insn), 0) == var
1596 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1597 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1598 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1599 {
1600 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1601 /* The REG_LIBCALL note will go away since we are going to
1602 turn INSN into a NOTE, so just delete the
1603 corresponding REG_RETVAL note. */
1604 remove_note (XEXP (note, 0),
1605 find_reg_note (XEXP (note, 0), REG_RETVAL,
1606 NULL_RTX));
1607
1608 /* In unoptimized compilation, we shouldn't call delete_insn
1609 except in jump.c doing warnings. */
1610 PUT_CODE (insn, NOTE);
1611 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1612 NOTE_SOURCE_FILE (insn) = 0;
1613 }
1614
1615 /* The insn to load VAR from a home in the arglist
1616 is now a no-op. When we see it, just delete it.
1617 Similarly if this is storing VAR from a register from which
1618 it was loaded in the previous insn. This will occur
1619 when an ADDRESSOF was made for an arglist slot. */
1620 else if (toplevel
1621 && (set = single_set (insn)) != 0
1622 && SET_DEST (set) == var
1623 /* If this represents the result of an insn group,
1624 don't delete the insn. */
1625 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1626 && (rtx_equal_p (SET_SRC (set), var)
1627 || (GET_CODE (SET_SRC (set)) == REG
1628 && (prev = prev_nonnote_insn (insn)) != 0
1629 && (prev_set = single_set (prev)) != 0
1630 && SET_DEST (prev_set) == SET_SRC (set)
1631 && rtx_equal_p (SET_SRC (prev_set), var))))
1632 {
1633 /* In unoptimized compilation, we shouldn't call delete_insn
1634 except in jump.c doing warnings. */
1635 PUT_CODE (insn, NOTE);
1636 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1637 NOTE_SOURCE_FILE (insn) = 0;
1638 if (insn == last_parm_insn)
1639 last_parm_insn = PREV_INSN (next);
1640 }
1641 else
1642 {
1643 struct fixup_replacement *replacements = 0;
1644 rtx next_insn = NEXT_INSN (insn);
1645
1646 if (SMALL_REGISTER_CLASSES)
1647 {
1648 /* If the insn that copies the results of a CALL_INSN
1649 into a pseudo now references VAR, we have to use an
1650 intermediate pseudo since we want the life of the
1651 return value register to be only a single insn.
1652
1653 If we don't use an intermediate pseudo, such things as
1654 address computations (needed to make the address of VAR
1655 valid when it is not) could be placed between the CALL_INSN and INSN.
1656
1657 To make sure this doesn't happen, we record the destination
1658 of the CALL_INSN and see if the next insn uses both that
1659 and VAR. */
1660
1661 if (call_dest != 0 && GET_CODE (insn) == INSN
1662 && reg_mentioned_p (var, PATTERN (insn))
1663 && reg_mentioned_p (call_dest, PATTERN (insn)))
1664 {
1665 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1666
1667 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1668
1669 PATTERN (insn) = replace_rtx (PATTERN (insn),
1670 call_dest, temp);
1671 }
1672
1673 if (GET_CODE (insn) == CALL_INSN
1674 && GET_CODE (PATTERN (insn)) == SET)
1675 call_dest = SET_DEST (PATTERN (insn));
1676 else if (GET_CODE (insn) == CALL_INSN
1677 && GET_CODE (PATTERN (insn)) == PARALLEL
1678 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1679 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1680 else
1681 call_dest = 0;
1682 }
1683
1684 /* See if we have to do anything to INSN now that VAR is in
1685 memory. If it needs to be loaded into a pseudo, use a single
1686 pseudo for the entire insn in case there is a MATCH_DUP
1687 between two operands. We pass a pointer to the head of
1688 a list of struct fixup_replacements. If fixup_var_refs_1
1689 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1690 it will record them in this list.
1691
1692 If it allocated a pseudo for any replacement, we copy into
1693 it here. */
1694
1695 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1696 &replacements);
1697
1698 /* If this is last_parm_insn, and any instructions were output
1699 after it to fix it up, then we must set last_parm_insn to
1700 the last such instruction emitted. */
1701 if (insn == last_parm_insn)
1702 last_parm_insn = PREV_INSN (next_insn);
1703
1704 while (replacements)
1705 {
1706 if (GET_CODE (replacements->new) == REG)
1707 {
1708 rtx insert_before;
1709 rtx seq;
1710
1711 /* OLD might be a (subreg (mem)). */
1712 if (GET_CODE (replacements->old) == SUBREG)
1713 replacements->old
1714 = fixup_memory_subreg (replacements->old, insn, 0);
1715 else
1716 replacements->old
1717 = fixup_stack_1 (replacements->old, insn);
1718
1719 insert_before = insn;
1720
1721 /* If we are changing the mode, do a conversion.
1722 This might be wasteful, but combine.c will
1723 eliminate much of the waste. */
1724
1725 if (GET_MODE (replacements->new)
1726 != GET_MODE (replacements->old))
1727 {
1728 start_sequence ();
1729 convert_move (replacements->new,
1730 replacements->old, unsignedp);
1731 seq = gen_sequence ();
1732 end_sequence ();
1733 }
1734 else
1735 seq = gen_move_insn (replacements->new,
1736 replacements->old);
1737
1738 emit_insn_before (seq, insert_before);
1739 }
1740
1741 replacements = replacements->next;
1742 }
1743 }
1744
1745 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1746 But don't touch other insns referred to by reg-notes;
1747 we will get them elsewhere. */
1748 while (note)
1749 {
1750 if (GET_CODE (note) != INSN_LIST)
1751 XEXP (note, 0)
1752 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1753 note = XEXP (note, 1);
1754 }
1755 }
1756
1757 if (!ht)
1758 insn = next;
1759 else if (insn_list)
1760 {
1761 insn = XEXP (insn_list, 0);
1762 insn_list = XEXP (insn_list, 1);
1763 }
1764 else
1765 insn = NULL_RTX;
1766 }
1767 }
1768 \f
1769 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1770 See if the rtx expression at *LOC in INSN needs to be changed.
1771
1772 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1773 contain a list of original rtx's and replacements. If we find that we need
1774 to modify this insn by replacing a memory reference with a pseudo or by
1775 making a new MEM to implement a SUBREG, we consult that list to see if
1776 we have already chosen a replacement. If none has already been allocated,
1777 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1778 or the SUBREG, as appropriate, to the pseudo. */
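     /* Illustrative sketch (register numbers and the frame offset are
        hypothetical): suppose VAR was pseudo (reg:SI 40) and has just become
        (mem:SI (plus (reg fp) (const_int -8))).  An insn such as
          (set (reg:SI 50) (plus:SI (reg:SI 40) (reg:SI 41)))
        now contains a MEM where the target may accept only a register.  In
        that case this routine records a replacement pseudo for the MEM on
        REPLACEMENTS, and the caller, fixup_var_refs_insns, emits the copy of
        VAR into that pseudo just before the insn.  */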
1779
1780 static void
1781 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1782 register rtx var;
1783 enum machine_mode promoted_mode;
1784 register rtx *loc;
1785 rtx insn;
1786 struct fixup_replacement **replacements;
1787 {
1788 register int i;
1789 register rtx x = *loc;
1790 RTX_CODE code = GET_CODE (x);
1791 register const char *fmt;
1792 register rtx tem, tem1;
1793 struct fixup_replacement *replacement;
1794
1795 switch (code)
1796 {
1797 case ADDRESSOF:
1798 if (XEXP (x, 0) == var)
1799 {
1800 /* Prevent sharing of rtl that might lose. */
1801 rtx sub = copy_rtx (XEXP (var, 0));
1802
1803 if (! validate_change (insn, loc, sub, 0))
1804 {
1805 rtx y = gen_reg_rtx (GET_MODE (sub));
1806 rtx seq, new_insn;
1807
1808 /* We should be able to replace with a register or all is lost.
1809 Note that we can't use validate_change to verify this, since
1810 we are not trying to replace all duplicates simultaneously. */
1811 if (! validate_replace_rtx (*loc, y, insn))
1812 abort ();
1813
1814 /* Careful! First try to recognize a direct move of the
1815 value, mimicking how things are done in gen_reload wrt
1816 PLUS. Consider what happens when insn is a conditional
1817 move instruction and addsi3 clobbers flags. */
1818
1819 start_sequence ();
1820 new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
1821 seq = gen_sequence ();
1822 end_sequence ();
1823
1824 if (recog_memoized (new_insn) < 0)
1825 {
1826 /* That failed. Fall back on force_operand and hope. */
1827
1828 start_sequence ();
1829 force_operand (sub, y);
1830 seq = gen_sequence ();
1831 end_sequence ();
1832 }
1833
1834 #ifdef HAVE_cc0
1835 /* Don't separate setter from user. */
1836 if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
1837 insn = PREV_INSN (insn);
1838 #endif
1839
1840 emit_insn_before (seq, insn);
1841 }
1842 }
1843 return;
1844
1845 case MEM:
1846 if (var == x)
1847 {
1848 /* If we already have a replacement, use it. Otherwise,
1849 try to fix up this address in case it is invalid. */
1850
1851 replacement = find_fixup_replacement (replacements, var);
1852 if (replacement->new)
1853 {
1854 *loc = replacement->new;
1855 return;
1856 }
1857
1858 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1859
1860 /* Unless we are forcing memory to register or we changed the mode,
1861 we can leave things the way they are if the insn is valid. */
1862
1863 INSN_CODE (insn) = -1;
1864 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1865 && recog_memoized (insn) >= 0)
1866 return;
1867
1868 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1869 return;
1870 }
1871
1872 /* If X contains VAR, we need to unshare it here so that we update
1873 each occurrence separately. But all identical MEMs in one insn
1874 must be replaced with the same rtx because of the possibility of
1875 MATCH_DUPs. */
1876
1877 if (reg_mentioned_p (var, x))
1878 {
1879 replacement = find_fixup_replacement (replacements, x);
1880 if (replacement->new == 0)
1881 replacement->new = copy_most_rtx (x, var);
1882
1883 *loc = x = replacement->new;
1884 }
1885 break;
1886
1887 case REG:
1888 case CC0:
1889 case PC:
1890 case CONST_INT:
1891 case CONST:
1892 case SYMBOL_REF:
1893 case LABEL_REF:
1894 case CONST_DOUBLE:
1895 return;
1896
1897 case SIGN_EXTRACT:
1898 case ZERO_EXTRACT:
1899 /* Note that in some cases those types of expressions are altered
1900 by optimize_bit_field, and do not survive to get here. */
1901 if (XEXP (x, 0) == var
1902 || (GET_CODE (XEXP (x, 0)) == SUBREG
1903 && SUBREG_REG (XEXP (x, 0)) == var))
1904 {
1905 /* Get TEM as a valid MEM in the mode presently in the insn.
1906
1907 We don't worry about the possibility of MATCH_DUP here; it
1908 is highly unlikely and would be tricky to handle. */
1909
1910 tem = XEXP (x, 0);
1911 if (GET_CODE (tem) == SUBREG)
1912 {
1913 if (GET_MODE_BITSIZE (GET_MODE (tem))
1914 > GET_MODE_BITSIZE (GET_MODE (var)))
1915 {
1916 replacement = find_fixup_replacement (replacements, var);
1917 if (replacement->new == 0)
1918 replacement->new = gen_reg_rtx (GET_MODE (var));
1919 SUBREG_REG (tem) = replacement->new;
1920 }
1921 else
1922 tem = fixup_memory_subreg (tem, insn, 0);
1923 }
1924 else
1925 tem = fixup_stack_1 (tem, insn);
1926
1927 /* Unless we want to load from memory, get TEM into the proper mode
1928 for an extract from memory. This can only be done if the
1929 extract is at a constant position and length. */
1930
1931 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1932 && GET_CODE (XEXP (x, 2)) == CONST_INT
1933 && ! mode_dependent_address_p (XEXP (tem, 0))
1934 && ! MEM_VOLATILE_P (tem))
1935 {
1936 enum machine_mode wanted_mode = VOIDmode;
1937 enum machine_mode is_mode = GET_MODE (tem);
1938 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1939
1940 #ifdef HAVE_extzv
1941 if (GET_CODE (x) == ZERO_EXTRACT)
1942 {
1943 wanted_mode
1944 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
1945 if (wanted_mode == VOIDmode)
1946 wanted_mode = word_mode;
1947 }
1948 #endif
1949 #ifdef HAVE_extv
1950 if (GET_CODE (x) == SIGN_EXTRACT)
1951 {
1952 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
1953 if (wanted_mode == VOIDmode)
1954 wanted_mode = word_mode;
1955 }
1956 #endif
1957 /* If we have a narrower mode, we can do something. */
1958 if (wanted_mode != VOIDmode
1959 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1960 {
1961 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1962 rtx old_pos = XEXP (x, 2);
1963 rtx newmem;
1964
1965 /* If the bytes and bits are counted differently, we
1966 must adjust the offset. */
1967 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1968 offset = (GET_MODE_SIZE (is_mode)
1969 - GET_MODE_SIZE (wanted_mode) - offset);
1970
1971 pos %= GET_MODE_BITSIZE (wanted_mode);
1972
1973 newmem = gen_rtx_MEM (wanted_mode,
1974 plus_constant (XEXP (tem, 0), offset));
1975 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1976 MEM_COPY_ATTRIBUTES (newmem, tem);
1977
1978 /* Make the change and see if the insn remains valid. */
1979 INSN_CODE (insn) = -1;
1980 XEXP (x, 0) = newmem;
1981 XEXP (x, 2) = GEN_INT (pos);
1982
1983 if (recog_memoized (insn) >= 0)
1984 return;
1985
1986 /* Otherwise, restore old position. XEXP (x, 0) will be
1987 restored later. */
1988 XEXP (x, 2) = old_pos;
1989 }
1990 }
1991
1992 /* If we get here, the bitfield extract insn can't accept a memory
1993 reference. Copy the input into a register. */
1994
1995 tem1 = gen_reg_rtx (GET_MODE (tem));
1996 emit_insn_before (gen_move_insn (tem1, tem), insn);
1997 XEXP (x, 0) = tem1;
1998 return;
1999 }
2000 break;
2001
2002 case SUBREG:
2003 if (SUBREG_REG (x) == var)
2004 {
2005 /* If this is a special SUBREG made because VAR was promoted
2006 from a wider mode, replace it with VAR and call ourself
2007 recursively, this time saying that the object previously
2008 had its current mode (by virtue of the SUBREG). */
2009
2010 if (SUBREG_PROMOTED_VAR_P (x))
2011 {
2012 *loc = var;
2013 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2014 return;
2015 }
2016
2017 /* If this SUBREG makes VAR wider, it has become a paradoxical
2018 SUBREG with VAR in memory, but these aren't allowed at this
2019 stage of the compilation. So load VAR into a pseudo and take
2020 a SUBREG of that pseudo. */
2021 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2022 {
2023 replacement = find_fixup_replacement (replacements, var);
2024 if (replacement->new == 0)
2025 replacement->new = gen_reg_rtx (GET_MODE (var));
2026 SUBREG_REG (x) = replacement->new;
2027 return;
2028 }
2029
2030 /* See if we have already found a replacement for this SUBREG.
2031 If so, use it. Otherwise, make a MEM and see if the insn
2032 is recognized. If not, or if we should force MEM into a register,
2033 make a pseudo for this SUBREG. */
2034 replacement = find_fixup_replacement (replacements, x);
2035 if (replacement->new)
2036 {
2037 *loc = replacement->new;
2038 return;
2039 }
2040
2041 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2042
2043 INSN_CODE (insn) = -1;
2044 if (! flag_force_mem && recog_memoized (insn) >= 0)
2045 return;
2046
2047 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2048 return;
2049 }
2050 break;
2051
2052 case SET:
2053 /* First do special simplification of bit-field references. */
2054 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2055 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2056 optimize_bit_field (x, insn, 0);
2057 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2058 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2059 optimize_bit_field (x, insn, NULL_PTR);
2060
2061 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2062 into a register and then store it back out. */
2063 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2064 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2065 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2066 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2067 > GET_MODE_SIZE (GET_MODE (var))))
2068 {
2069 replacement = find_fixup_replacement (replacements, var);
2070 if (replacement->new == 0)
2071 replacement->new = gen_reg_rtx (GET_MODE (var));
2072
2073 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2074 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2075 }
2076
2077 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2078 insn into a pseudo and store the low part of the pseudo into VAR. */
2079 if (GET_CODE (SET_DEST (x)) == SUBREG
2080 && SUBREG_REG (SET_DEST (x)) == var
2081 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2082 > GET_MODE_SIZE (GET_MODE (var))))
2083 {
2084 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2085 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2086 tem)),
2087 insn);
2088 break;
2089 }
2090
2091 {
2092 rtx dest = SET_DEST (x);
2093 rtx src = SET_SRC (x);
2094 #ifdef HAVE_insv
2095 rtx outerdest = dest;
2096 #endif
2097
2098 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2099 || GET_CODE (dest) == SIGN_EXTRACT
2100 || GET_CODE (dest) == ZERO_EXTRACT)
2101 dest = XEXP (dest, 0);
2102
2103 if (GET_CODE (src) == SUBREG)
2104 src = XEXP (src, 0);
2105
2106 /* If VAR does not appear at the top level of the SET
2107 just scan the lower levels of the tree. */
2108
2109 if (src != var && dest != var)
2110 break;
2111
2112 /* We will need to rerecognize this insn. */
2113 INSN_CODE (insn) = -1;
2114
2115 #ifdef HAVE_insv
2116 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2117 {
2118 /* Since this case will return, ensure we fixup all the
2119 operands here. */
2120 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2121 insn, replacements);
2122 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2123 insn, replacements);
2124 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2125 insn, replacements);
2126
2127 tem = XEXP (outerdest, 0);
2128
2129 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2130 that may appear inside a ZERO_EXTRACT.
2131 This was legitimate when the MEM was a REG. */
2132 if (GET_CODE (tem) == SUBREG
2133 && SUBREG_REG (tem) == var)
2134 tem = fixup_memory_subreg (tem, insn, 0);
2135 else
2136 tem = fixup_stack_1 (tem, insn);
2137
2138 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2139 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2140 && ! mode_dependent_address_p (XEXP (tem, 0))
2141 && ! MEM_VOLATILE_P (tem))
2142 {
2143 enum machine_mode wanted_mode;
2144 enum machine_mode is_mode = GET_MODE (tem);
2145 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2146
2147 wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
2148 if (wanted_mode == VOIDmode)
2149 wanted_mode = word_mode;
2150
2151 /* If we have a narrower mode, we can do something. */
2152 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2153 {
2154 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2155 rtx old_pos = XEXP (outerdest, 2);
2156 rtx newmem;
2157
2158 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2159 offset = (GET_MODE_SIZE (is_mode)
2160 - GET_MODE_SIZE (wanted_mode) - offset);
2161
2162 pos %= GET_MODE_BITSIZE (wanted_mode);
2163
2164 newmem = gen_rtx_MEM (wanted_mode,
2165 plus_constant (XEXP (tem, 0),
2166 offset));
2167 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2168 MEM_COPY_ATTRIBUTES (newmem, tem);
2169
2170 /* Make the change and see if the insn remains valid. */
2171 INSN_CODE (insn) = -1;
2172 XEXP (outerdest, 0) = newmem;
2173 XEXP (outerdest, 2) = GEN_INT (pos);
2174
2175 if (recog_memoized (insn) >= 0)
2176 return;
2177
2178 /* Otherwise, restore old position. XEXP (x, 0) will be
2179 restored later. */
2180 XEXP (outerdest, 2) = old_pos;
2181 }
2182 }
2183
2184 /* If we get here, the bit-field store doesn't allow memory
2185 or isn't located at a constant position. Load the value into
2186 a register, do the store, and put it back into memory. */
2187
2188 tem1 = gen_reg_rtx (GET_MODE (tem));
2189 emit_insn_before (gen_move_insn (tem1, tem), insn);
2190 emit_insn_after (gen_move_insn (tem, tem1), insn);
2191 XEXP (outerdest, 0) = tem1;
2192 return;
2193 }
2194 #endif
2195
2196 /* STRICT_LOW_PART is a no-op on memory references
2197 and it can cause combinations to be unrecognizable,
2198 so eliminate it. */
2199
2200 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2201 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2202
2203 /* A valid insn to copy VAR into or out of a register
2204 must be left alone, to avoid an infinite loop here.
2205 If the reference to VAR is by a subreg, fix that up,
2206 since SUBREG is not valid for a memref.
2207 Also fix up the address of the stack slot.
2208
2209 Note that we must not try to recognize the insn until
2210 after we know that we have valid addresses and no
2211 (subreg (mem ...) ...) constructs, since these interfere
2212 with determining the validity of the insn. */
2213
2214 if ((SET_SRC (x) == var
2215 || (GET_CODE (SET_SRC (x)) == SUBREG
2216 && SUBREG_REG (SET_SRC (x)) == var))
2217 && (GET_CODE (SET_DEST (x)) == REG
2218 || (GET_CODE (SET_DEST (x)) == SUBREG
2219 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2220 && GET_MODE (var) == promoted_mode
2221 && x == single_set (insn))
2222 {
2223 rtx pat;
2224
2225 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2226 if (replacement->new)
2227 SET_SRC (x) = replacement->new;
2228 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2229 SET_SRC (x) = replacement->new
2230 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2231 else
2232 SET_SRC (x) = replacement->new
2233 = fixup_stack_1 (SET_SRC (x), insn);
2234
2235 if (recog_memoized (insn) >= 0)
2236 return;
2237
2238 /* INSN is not valid, but we know that we want to
2239 copy SET_SRC (x) to SET_DEST (x) in some way. So
2240 we generate the move and see whether it requires more
2241 than one insn. If it does, we emit those insns and
2242 delete INSN. Otherwise, we an just replace the pattern
2243 of INSN; we have already verified above that INSN has
2244 no other function that to do X. */
2245
2246 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2247 if (GET_CODE (pat) == SEQUENCE)
2248 {
2249 emit_insn_after (pat, insn);
2250 PUT_CODE (insn, NOTE);
2251 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2252 NOTE_SOURCE_FILE (insn) = 0;
2253 }
2254 else
2255 PATTERN (insn) = pat;
2256
2257 return;
2258 }
2259
2260 if ((SET_DEST (x) == var
2261 || (GET_CODE (SET_DEST (x)) == SUBREG
2262 && SUBREG_REG (SET_DEST (x)) == var))
2263 && (GET_CODE (SET_SRC (x)) == REG
2264 || (GET_CODE (SET_SRC (x)) == SUBREG
2265 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2266 && GET_MODE (var) == promoted_mode
2267 && x == single_set (insn))
2268 {
2269 rtx pat;
2270
2271 if (GET_CODE (SET_DEST (x)) == SUBREG)
2272 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2273 else
2274 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2275
2276 if (recog_memoized (insn) >= 0)
2277 return;
2278
2279 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2280 if (GET_CODE (pat) == SEQUENCE)
2281 {
2282 emit_insn_after (pat, insn);
2283 PUT_CODE (insn, NOTE);
2284 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2285 NOTE_SOURCE_FILE (insn) = 0;
2286 }
2287 else
2288 PATTERN (insn) = pat;
2289
2290 return;
2291 }
2292
2293 /* Otherwise, storing into VAR must be handled specially
2294 by storing into a temporary and copying that into VAR
2295 with a new insn after this one. Note that this case
2296 will be used when storing into a promoted scalar since
2297 the insn will now have different modes on the input
2298 and output and hence will be invalid (except for the case
2299 of setting it to a constant, which does not need any
2300 change if it is valid). We generate extra code in that case,
2301 but combine.c will eliminate it. */
2302
2303 if (dest == var)
2304 {
2305 rtx temp;
2306 rtx fixeddest = SET_DEST (x);
2307
2308 /* STRICT_LOW_PART can be discarded, around a MEM. */
2309 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2310 fixeddest = XEXP (fixeddest, 0);
2311 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2312 if (GET_CODE (fixeddest) == SUBREG)
2313 {
2314 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2315 promoted_mode = GET_MODE (fixeddest);
2316 }
2317 else
2318 fixeddest = fixup_stack_1 (fixeddest, insn);
2319
2320 temp = gen_reg_rtx (promoted_mode);
2321
2322 emit_insn_after (gen_move_insn (fixeddest,
2323 gen_lowpart (GET_MODE (fixeddest),
2324 temp)),
2325 insn);
2326
2327 SET_DEST (x) = temp;
2328 }
2329 }
2330
2331 default:
2332 break;
2333 }
2334
2335 /* Nothing special about this RTX; fix its operands. */
2336
2337 fmt = GET_RTX_FORMAT (code);
2338 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2339 {
2340 if (fmt[i] == 'e')
2341 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2342 else if (fmt[i] == 'E')
2343 {
2344 register int j;
2345 for (j = 0; j < XVECLEN (x, i); j++)
2346 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2347 insn, replacements);
2348 }
2349 }
2350 }
2351 \f
2352 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2353 return an rtx (MEM:m1 newaddr) which is equivalent.
2354 If any insns must be emitted to compute NEWADDR, put them before INSN.
2355
2356 UNCRITICAL nonzero means accept paradoxical subregs.
2357 This is used for subregs found inside REG_NOTES. */
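     /* Worked example (assuming 4-byte words and a target where neither
        BYTES_BIG_ENDIAN nor flag_force_addr is in effect; register 100 is a
        made-up pseudo):
          (subreg:SI (mem:DI (reg:SI 100)) 1)
        has SUBREG_WORD == 1, so offset == 1 * UNITS_PER_WORD == 4 and the
        result is
          (mem:SI (plus:SI (reg:SI 100) (const_int 4))).
        The BYTES_BIG_ENDIAN adjustment below can shift the offset further
        for modes narrower than a word.  */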
2358
2359 static rtx
2360 fixup_memory_subreg (x, insn, uncritical)
2361 rtx x;
2362 rtx insn;
2363 int uncritical;
2364 {
2365 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2366 rtx addr = XEXP (SUBREG_REG (x), 0);
2367 enum machine_mode mode = GET_MODE (x);
2368 rtx result;
2369
2370 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2371 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2372 && ! uncritical)
2373 abort ();
2374
2375 if (BYTES_BIG_ENDIAN)
2376 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2377 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2378 addr = plus_constant (addr, offset);
2379 if (!flag_force_addr && memory_address_p (mode, addr))
2380 /* Shortcut if no insns need be emitted. */
2381 return change_address (SUBREG_REG (x), mode, addr);
2382 start_sequence ();
2383 result = change_address (SUBREG_REG (x), mode, addr);
2384 emit_insn_before (gen_sequence (), insn);
2385 end_sequence ();
2386 return result;
2387 }
2388
2389 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2390 Replace subexpressions of X in place.
2391 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2392 Otherwise return X, with its contents possibly altered.
2393
2394 If any insns must be emitted to compute NEWADDR, put them before INSN.
2395
2396 UNCRITICAL is as in fixup_memory_subreg. */
2397
2398 static rtx
2399 walk_fixup_memory_subreg (x, insn, uncritical)
2400 register rtx x;
2401 rtx insn;
2402 int uncritical;
2403 {
2404 register enum rtx_code code;
2405 register const char *fmt;
2406 register int i;
2407
2408 if (x == 0)
2409 return 0;
2410
2411 code = GET_CODE (x);
2412
2413 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2414 return fixup_memory_subreg (x, insn, uncritical);
2415
2416 /* Nothing special about this RTX; fix its operands. */
2417
2418 fmt = GET_RTX_FORMAT (code);
2419 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2420 {
2421 if (fmt[i] == 'e')
2422 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2423 else if (fmt[i] == 'E')
2424 {
2425 register int j;
2426 for (j = 0; j < XVECLEN (x, i); j++)
2427 XVECEXP (x, i, j)
2428 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2429 }
2430 }
2431 return x;
2432 }
2433 \f
2434 /* For each memory ref within X, if it refers to a stack slot
2435 with an out of range displacement, put the address in a temp register
2436 (emitting new insns before INSN to load these registers)
2437 and alter the memory ref to use that register.
2438 Replace each such MEM rtx with a copy, to avoid clobberage. */
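     /* Illustrative example (the displacement and register numbers are
        invented): a very large frame can produce
          (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 70000)))
        whose displacement the target cannot encode directly.  The code below
        then emits, before INSN, insns computing something like
          (set (reg:SI 101) (plus:SI (reg:SI frame-pointer) (const_int 70000)))
        and rewrites the reference as (mem:SI (reg:SI 101)).  */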
2439
2440 static rtx
2441 fixup_stack_1 (x, insn)
2442 rtx x;
2443 rtx insn;
2444 {
2445 register int i;
2446 register RTX_CODE code = GET_CODE (x);
2447 register const char *fmt;
2448
2449 if (code == MEM)
2450 {
2451 register rtx ad = XEXP (x, 0);
2452 /* If we have address of a stack slot but it's not valid
2453 (displacement is too large), compute the sum in a register. */
2454 if (GET_CODE (ad) == PLUS
2455 && GET_CODE (XEXP (ad, 0)) == REG
2456 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2457 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2458 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2459 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2460 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2461 #endif
2462 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2463 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2464 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2465 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2466 {
2467 rtx temp, seq;
2468 if (memory_address_p (GET_MODE (x), ad))
2469 return x;
2470
2471 start_sequence ();
2472 temp = copy_to_reg (ad);
2473 seq = gen_sequence ();
2474 end_sequence ();
2475 emit_insn_before (seq, insn);
2476 return change_address (x, VOIDmode, temp);
2477 }
2478 return x;
2479 }
2480
2481 fmt = GET_RTX_FORMAT (code);
2482 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2483 {
2484 if (fmt[i] == 'e')
2485 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2486 else if (fmt[i] == 'E')
2487 {
2488 register int j;
2489 for (j = 0; j < XVECLEN (x, i); j++)
2490 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2491 }
2492 }
2493 return x;
2494 }
2495 \f
2496 /* Optimization: a bit-field instruction whose field
2497 happens to be a byte or halfword in memory
2498 can be changed to a move instruction.
2499
2500 We call here when INSN is an insn to examine or store into a bit-field.
2501 BODY is the SET-rtx to be altered.
2502
2503 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2504 (Currently this is called only from function.c, and EQUIV_MEM
2505 is always 0.) */
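     /* Illustrative example (little-endian target, neither BITS_BIG_ENDIAN
        nor BYTES_BIG_ENDIAN; register numbers are invented): a store such as
          (set (zero_extract:SI (mem:SI (reg:SI 100))
                                (const_int 8) (const_int 8))
               (reg:SI 101))
        names a field that is exactly the second byte of the word, so it can
        become the plain move
          (set (mem:QI (plus:SI (reg:SI 100) (const_int 1)))
               (subreg:QI (reg:SI 101) 0)).  */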
2506
2507 static void
2508 optimize_bit_field (body, insn, equiv_mem)
2509 rtx body;
2510 rtx insn;
2511 rtx *equiv_mem;
2512 {
2513 register rtx bitfield;
2514 int destflag;
2515 rtx seq = 0;
2516 enum machine_mode mode;
2517
2518 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2519 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2520 bitfield = SET_DEST (body), destflag = 1;
2521 else
2522 bitfield = SET_SRC (body), destflag = 0;
2523
2524 /* First check that the field being stored has constant size and position
2525 and is in fact a byte or halfword suitably aligned. */
2526
2527 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2528 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2529 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2530 != BLKmode)
2531 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2532 {
2533 register rtx memref = 0;
2534
2535 /* Now check that the containing word is memory, not a register,
2536 and that it is safe to change the machine mode. */
2537
2538 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2539 memref = XEXP (bitfield, 0);
2540 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2541 && equiv_mem != 0)
2542 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2543 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2544 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2545 memref = SUBREG_REG (XEXP (bitfield, 0));
2546 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2547 && equiv_mem != 0
2548 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2549 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2550
2551 if (memref
2552 && ! mode_dependent_address_p (XEXP (memref, 0))
2553 && ! MEM_VOLATILE_P (memref))
2554 {
2555 /* Now adjust the address, first for any subreg'ing
2556 that we are now getting rid of,
2557 and then for which byte of the word is wanted. */
2558
2559 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2560 rtx insns;
2561
2562 /* Adjust OFFSET to count bits from low-address byte. */
2563 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2564 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2565 - offset - INTVAL (XEXP (bitfield, 1)));
2566
2567 /* Adjust OFFSET to count bytes from low-address byte. */
2568 offset /= BITS_PER_UNIT;
2569 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2570 {
2571 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2572 if (BYTES_BIG_ENDIAN)
2573 offset -= (MIN (UNITS_PER_WORD,
2574 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2575 - MIN (UNITS_PER_WORD,
2576 GET_MODE_SIZE (GET_MODE (memref))));
2577 }
2578
2579 start_sequence ();
2580 memref = change_address (memref, mode,
2581 plus_constant (XEXP (memref, 0), offset));
2582 insns = get_insns ();
2583 end_sequence ();
2584 emit_insns_before (insns, insn);
2585
2586 /* Store this memory reference where
2587 we found the bit field reference. */
2588
2589 if (destflag)
2590 {
2591 validate_change (insn, &SET_DEST (body), memref, 1);
2592 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2593 {
2594 rtx src = SET_SRC (body);
2595 while (GET_CODE (src) == SUBREG
2596 && SUBREG_WORD (src) == 0)
2597 src = SUBREG_REG (src);
2598 if (GET_MODE (src) != GET_MODE (memref))
2599 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2600 validate_change (insn, &SET_SRC (body), src, 1);
2601 }
2602 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2603 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2604 /* This shouldn't happen because anything that didn't have
2605 one of these modes should have got converted explicitly
2606 and then referenced through a subreg.
2607 This is so because the original bit-field was
2608 handled by agg_mode and so its tree structure had
2609 the same mode that memref now has. */
2610 abort ();
2611 }
2612 else
2613 {
2614 rtx dest = SET_DEST (body);
2615
2616 while (GET_CODE (dest) == SUBREG
2617 && SUBREG_WORD (dest) == 0
2618 && (GET_MODE_CLASS (GET_MODE (dest))
2619 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
2620 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2621 <= UNITS_PER_WORD))
2622 dest = SUBREG_REG (dest);
2623
2624 validate_change (insn, &SET_DEST (body), dest, 1);
2625
2626 if (GET_MODE (dest) == GET_MODE (memref))
2627 validate_change (insn, &SET_SRC (body), memref, 1);
2628 else
2629 {
2630 /* Convert the mem ref to the destination mode. */
2631 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2632
2633 start_sequence ();
2634 convert_move (newreg, memref,
2635 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2636 seq = get_insns ();
2637 end_sequence ();
2638
2639 validate_change (insn, &SET_SRC (body), newreg, 1);
2640 }
2641 }
2642
2643 /* See if we can convert this extraction or insertion into
2644 a simple move insn. We might not be able to do so if this
2645 was, for example, part of a PARALLEL.
2646
2647 If we succeed, write out any needed conversions. If we fail,
2648 it is hard to guess why we failed, so don't do anything
2649 special; just let the optimization be suppressed. */
2650
2651 if (apply_change_group () && seq)
2652 emit_insns_before (seq, insn);
2653 }
2654 }
2655 }
2656 \f
2657 /* These routines are responsible for converting virtual register references
2658 to the actual hard register references once RTL generation is complete.
2659
2660 The following variables are used for communication between the
2661 routines. They contain the offsets of the virtual registers from their
2662 respective hard registers. */
2663
2664 static int in_arg_offset;
2665 static int var_offset;
2666 static int dynamic_offset;
2667 static int out_arg_offset;
2668 static int cfa_offset;
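     /* For reference, the mapping applied by instantiate_virtual_regs_1 is:
          virtual-incoming-args   ->  arg pointer    + in_arg_offset
          virtual-stack-vars      ->  frame pointer  + var_offset
          virtual-stack-dynamic   ->  stack pointer  + dynamic_offset
          virtual-outgoing-args   ->  stack pointer  + out_arg_offset
          virtual-cfa             ->  arg pointer    + cfa_offset  */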
2669
2670 /* In most machines, the stack pointer register is equivalent to the bottom
2671 of the stack. */
2672
2673 #ifndef STACK_POINTER_OFFSET
2674 #define STACK_POINTER_OFFSET 0
2675 #endif
2676
2677 /* If not defined, pick an appropriate default for the offset of dynamically
2678 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2679 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2680
2681 #ifndef STACK_DYNAMIC_OFFSET
2682
2683 #ifdef ACCUMULATE_OUTGOING_ARGS
2684 /* The bottom of the stack points to the actual arguments. If
2685 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2686 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2687 stack space for register parameters is not pushed by the caller, but
2688 rather part of the fixed stack areas and hence not included in
2689 `current_function_outgoing_args_size'. Nevertheless, we must allow
2690 for it when allocating stack dynamic objects. */
2691
2692 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2693 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2694 (current_function_outgoing_args_size \
2695 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2696
2697 #else
2698 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2699 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2700 #endif
2701
2702 #else
2703 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2704 #endif
2705 #endif
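     /* Sketch of how the default composes (all numbers hypothetical): on a
        target that accumulates outgoing args, defines REG_PARM_STACK_SPACE
        but not OUTGOING_REG_PARM_STACK_SPACE, and has STACK_POINTER_OFFSET
        of 0, a function with 32 bytes of outgoing arguments and 16 bytes of
        register parameter save area gets STACK_DYNAMIC_OFFSET of
        32 + 16 + 0 == 48, so dynamic allocations start just above both
        areas.  */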
2706
2707 /* On a few machines, the CFA coincides with the arg pointer. */
2708
2709 #ifndef ARG_POINTER_CFA_OFFSET
2710 #define ARG_POINTER_CFA_OFFSET 0
2711 #endif
2712
2713
2714 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2715 its address taken. DECL is the decl for the object stored in the
2716 register, for later use if we do need to force REG into the stack.
2717 REG is overwritten by the MEM like in put_reg_into_stack. */
2718
2719 rtx
2720 gen_mem_addressof (reg, decl)
2721 rtx reg;
2722 tree decl;
2723 {
2724 tree type = TREE_TYPE (decl);
2725 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
2726 REGNO (reg), decl);
2727
2728 /* If the original REG was a user-variable, then so is the REG whose
2729 address is being taken. Likewise for unchanging. */
2730 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2731 RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
2732
2733 PUT_CODE (reg, MEM);
2734 PUT_MODE (reg, DECL_MODE (decl));
2735 XEXP (reg, 0) = r;
2736 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2737 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2738 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2739
2740 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2741 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
2742
2743 return reg;
2744 }
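     /* For illustration (register numbers invented, and assuming Pmode is
        SImode): if DECL is an SImode variable living in (reg:SI 42), the
        call turns that very rtx, in place, into
          (mem:SI (addressof:SI (reg:SI 77) 42 <decl>))
        where (reg:SI 77) is a fresh pseudo.  If the ADDRESSOF cannot later
        be purged, put_addressof_into_stack uses the recorded register
        number and DECL to give that pseudo a real stack slot.  */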
2745
2746 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2747
2748 #if 0
2749 void
2750 flush_addressof (decl)
2751 tree decl;
2752 {
2753 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2754 && DECL_RTL (decl) != 0
2755 && GET_CODE (DECL_RTL (decl)) == MEM
2756 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2757 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2758 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
2759 }
2760 #endif
2761
2762 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2763
2764 static void
2765 put_addressof_into_stack (r, ht)
2766 rtx r;
2767 struct hash_table *ht;
2768 {
2769 tree decl = ADDRESSOF_DECL (r);
2770 rtx reg = XEXP (r, 0);
2771
2772 if (GET_CODE (reg) != REG)
2773 abort ();
2774
2775 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2776 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2777 ADDRESSOF_REGNO (r),
2778 TREE_USED (decl) || DECL_INITIAL (decl) != 0, ht);
2779 }
2780
2781 /* List of replacements made below in purge_addressof_1 when creating
2782 bitfield insertions. */
2783 static rtx purge_bitfield_addressof_replacements;
2784
2785 /* List of replacements made below in purge_addressof_1 for patterns
2786 (MEM (ADDRESSOF (REG ...))). The key of the list entry is the
2787 corresponding (ADDRESSOF (REG ...)) and the value is a substitution for
2788 the whole pattern. The list PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
2789 enough in complex cases, e.g. when some field values can be
2790 extracted by using a MEM with a narrower mode.
2791 static rtx purge_addressof_replacements;
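     /* Both lists are chains of EXPR_LIST nodes of the form
          (expr_list KEY (expr_list REPLACEMENT <rest of list>))
        so each entry occupies two links: XEXP (entry, 0) is the key and
        XEXP (XEXP (entry, 1), 0) is the value, which is how the lookups in
        purge_addressof_1 below walk them.  */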
2792
2793 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2794 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2795 the stack. If the function returns FALSE then the replacement could not
2796 be made. */
2797
2798 static boolean
2799 purge_addressof_1 (loc, insn, force, store, ht)
2800 rtx *loc;
2801 rtx insn;
2802 int force, store;
2803 struct hash_table *ht;
2804 {
2805 rtx x;
2806 RTX_CODE code;
2807 int i, j;
2808 const char *fmt;
2809 boolean result = true;
2810
2811 /* Re-start here to avoid recursion in common cases. */
2812 restart:
2813
2814 x = *loc;
2815 if (x == 0)
2816 return true;
2817
2818 code = GET_CODE (x);
2819
2820 /* If we don't return in any of the cases below, we will recurse inside
2821 the RTX, which will normally result in any ADDRESSOF being forced into
2822 memory. */
2823 if (code == SET)
2824 {
2825 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
2826 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
2827 return result;
2828 }
2829
2830 else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2831 {
2832 /* We must create a copy of the rtx because it was created by
2833 overwriting a REG rtx which is always shared. */
2834 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2835 rtx insns;
2836
2837 if (validate_change (insn, loc, sub, 0)
2838 || validate_replace_rtx (x, sub, insn))
2839 return true;
2840
2841 start_sequence ();
2842 sub = force_operand (sub, NULL_RTX);
2843 if (! validate_change (insn, loc, sub, 0)
2844 && ! validate_replace_rtx (x, sub, insn))
2845 abort ();
2846
2847 insns = gen_sequence ();
2848 end_sequence ();
2849 emit_insn_before (insns, insn);
2850 return true;
2851 }
2852
2853 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2854 {
2855 rtx sub = XEXP (XEXP (x, 0), 0);
2856 rtx sub2;
2857
2858 if (GET_CODE (sub) == MEM)
2859 {
2860 sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2861 MEM_COPY_ATTRIBUTES (sub2, sub);
2862 RTX_UNCHANGING_P (sub2) = RTX_UNCHANGING_P (sub);
2863 sub = sub2;
2864 }
2865 else if (GET_CODE (sub) == REG
2866 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2867 ;
2868 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2869 {
2870 int size_x, size_sub;
2871
2872 if (!insn)
2873 {
2874 /* When processing REG_NOTES look at the list of
2875 replacements done on the insn to find the register that X
2876 was replaced by. */
2877 rtx tem;
2878
2879 for (tem = purge_bitfield_addressof_replacements;
2880 tem != NULL_RTX;
2881 tem = XEXP (XEXP (tem, 1), 1))
2882 if (rtx_equal_p (x, XEXP (tem, 0)))
2883 {
2884 *loc = XEXP (XEXP (tem, 1), 0);
2885 return true;
2886 }
2887
2888 /* See comment for purge_addressof_replacements. */
2889 for (tem = purge_addressof_replacements;
2890 tem != NULL_RTX;
2891 tem = XEXP (XEXP (tem, 1), 1))
2892 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
2893 {
2894 rtx z = XEXP (XEXP (tem, 1), 0);
2895
2896 if (GET_MODE (x) == GET_MODE (z)
2897 || (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
2898 && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
2899 abort ();
2900
2901 /* It can happen that the note may speak of things
2902 in a wider (or just different) mode than the
2903 code did. This is especially true of
2904 REG_RETVAL. */
2905
2906 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2907 z = SUBREG_REG (z);
2908
2909 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2910 && (GET_MODE_SIZE (GET_MODE (x))
2911 > GET_MODE_SIZE (GET_MODE (z))))
2912 {
2913 /* This can occur as a result of invalid
2914 pointer casts, e.g. float f; ...
2915 *(long long int *)&f.
2916 ??? We could emit a warning here, but
2917 without a line number that wouldn't be
2918 very helpful. */
2919 z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
2920 }
2921 else
2922 z = gen_lowpart (GET_MODE (x), z);
2923
2924 *loc = z;
2925 return true;
2926 }
2927
2928 /* Sometimes we may not be able to find the replacement. For
2929 example when the original insn was a MEM in a wider mode,
2930 and the note is part of a sign extension of a narrowed
2931 version of that MEM. Gcc testcase compile/990829-1.c can
2932 generate an example of this situation. Rather than complain
2933 we return false, which will prompt our caller to remove the
2934 offending note. */
2935 return false;
2936 }
2937
2938 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2939 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2940
2941 /* Don't even consider working with paradoxical subregs,
2942 or the moral equivalent seen here. */
2943 if (size_x <= size_sub
2944 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2945 {
2946 /* Do a bitfield insertion to mirror what would happen
2947 in memory. */
2948
2949 rtx val, seq;
2950
2951 if (store)
2952 {
2953 rtx p = PREV_INSN (insn);
2954
2955 start_sequence ();
2956 val = gen_reg_rtx (GET_MODE (x));
2957 if (! validate_change (insn, loc, val, 0))
2958 {
2959 /* Discard the current sequence and put the
2960 ADDRESSOF on stack. */
2961 end_sequence ();
2962 goto give_up;
2963 }
2964 seq = gen_sequence ();
2965 end_sequence ();
2966 emit_insn_before (seq, insn);
2967 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
2968 insn, ht);
2969
2970 start_sequence ();
2971 store_bit_field (sub, size_x, 0, GET_MODE (x),
2972 val, GET_MODE_SIZE (GET_MODE (sub)),
2973 GET_MODE_SIZE (GET_MODE (sub)));
2974
2975 /* Make sure to unshare any shared rtl that store_bit_field
2976 might have created. */
2977 unshare_all_rtl_again (get_insns ());
2978
2979 seq = gen_sequence ();
2980 end_sequence ();
2981 p = emit_insn_after (seq, insn);
2982 if (NEXT_INSN (insn))
2983 compute_insns_for_mem (NEXT_INSN (insn),
2984 p ? NEXT_INSN (p) : NULL_RTX,
2985 ht);
2986 }
2987 else
2988 {
2989 rtx p = PREV_INSN (insn);
2990
2991 start_sequence ();
2992 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2993 GET_MODE (x), GET_MODE (x),
2994 GET_MODE_SIZE (GET_MODE (sub)),
2995 GET_MODE_SIZE (GET_MODE (sub)));
2996
2997 if (! validate_change (insn, loc, val, 0))
2998 {
2999 /* Discard the current sequence and put the
3000 ADDRESSOF on stack. */
3001 end_sequence ();
3002 goto give_up;
3003 }
3004
3005 seq = gen_sequence ();
3006 end_sequence ();
3007 emit_insn_before (seq, insn);
3008 compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3009 insn, ht);
3010 }
3011
3012 /* Remember the replacement so that the same one can be done
3013 on the REG_NOTES. */
3014 purge_bitfield_addressof_replacements
3015 = gen_rtx_EXPR_LIST (VOIDmode, x,
3016 gen_rtx_EXPR_LIST
3017 (VOIDmode, val,
3018 purge_bitfield_addressof_replacements));
3019
3020 /* We replaced with a reg -- all done. */
3021 return true;
3022 }
3023 }
3024
3025 else if (validate_change (insn, loc, sub, 0))
3026 {
3027 /* Remember the replacement so that the same one can be done
3028 on the REG_NOTES. */
3029 if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3030 {
3031 rtx tem;
3032
3033 for (tem = purge_addressof_replacements;
3034 tem != NULL_RTX;
3035 tem = XEXP (XEXP (tem, 1), 1))
3036 if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3037 {
3038 XEXP (XEXP (tem, 1), 0) = sub;
3039 return true;
3040 }
3041 purge_addressof_replacements
3042 = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3043 gen_rtx_EXPR_LIST (VOIDmode, sub,
3044 purge_addressof_replacements));
3045 return true;
3046 }
3047 goto restart;
3048 }
3049 give_up:;
3050 /* else give up and put it into the stack */
3051 }
3052
3053 else if (code == ADDRESSOF)
3054 {
3055 put_addressof_into_stack (x, ht);
3056 return true;
3057 }
3058 else if (code == SET)
3059 {
3060 result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3061 result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3062 return result;
3063 }
3064
3065 /* Scan all subexpressions. */
3066 fmt = GET_RTX_FORMAT (code);
3067 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3068 {
3069 if (*fmt == 'e')
3070 result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3071 else if (*fmt == 'E')
3072 for (j = 0; j < XVECLEN (x, i); j++)
3073 result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3074 }
3075
3076 return result;
3077 }
3078
3079 /* Return a new hash table entry in HT. */
3080
3081 static struct hash_entry *
3082 insns_for_mem_newfunc (he, ht, k)
3083 struct hash_entry *he;
3084 struct hash_table *ht;
3085 hash_table_key k ATTRIBUTE_UNUSED;
3086 {
3087 struct insns_for_mem_entry *ifmhe;
3088 if (he)
3089 return he;
3090
3091 ifmhe = ((struct insns_for_mem_entry *)
3092 hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
3093 ifmhe->insns = NULL_RTX;
3094
3095 return &ifmhe->he;
3096 }
3097
3098 /* Return a hash value for K, a REG. */
3099
3100 static unsigned long
3101 insns_for_mem_hash (k)
3102 hash_table_key k;
3103 {
3104 /* K is really an RTX. Just use the address as the hash value. */
3105 return (unsigned long) k;
3106 }
3107
3108 /* Return non-zero if K1 and K2 (two REGs) are the same. */
3109
3110 static boolean
3111 insns_for_mem_comp (k1, k2)
3112 hash_table_key k1;
3113 hash_table_key k2;
3114 {
3115 return k1 == k2;
3116 }
3117
3118 struct insns_for_mem_walk_info {
3119 /* The hash table that we are using to record which INSNs use which
3120 MEMs. */
3121 struct hash_table *ht;
3122
3123 /* The INSN we are currently processing. */
3124 rtx insn;
3125
3126 /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3127 to find the insns that use the REGs in the ADDRESSOFs. */
3128 int pass;
3129 };
3130
3131 /* Called from compute_insns_for_mem via for_each_rtx. If R is a REG
3132 that might be used in an ADDRESSOF expression, record this INSN in
3133 the hash table given by DATA (which is really a pointer to an
3134 insns_for_mem_walk_info structure). */
3135
3136 static int
3137 insns_for_mem_walk (r, data)
3138 rtx *r;
3139 void *data;
3140 {
3141 struct insns_for_mem_walk_info *ifmwi
3142 = (struct insns_for_mem_walk_info *) data;
3143
3144 if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3145 && GET_CODE (XEXP (*r, 0)) == REG)
3146 hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
3147 else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3148 {
3149 /* Look up this REG in the hash table; an entry exists only if the REG appeared inside an ADDRESSOF during the first pass. */
3150 struct insns_for_mem_entry *ifme
3151 = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
3152 *r,
3153 /*create=*/0,
3154 /*copy=*/0);
3155
3156 /* If we have not already recorded this INSN, do so now. Since
3157 we process the INSNs in order, we know that if we have
3158 recorded it, it must be at the front of the list. */
3159 if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3160 {
3161 /* We do the allocation on the same obstack as is used for
3162 the hash table since this memory will not be used once
3163 the hash table is deallocated. */
3164 push_obstacks (&ifmwi->ht->memory, &ifmwi->ht->memory);
3165 ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3166 ifme->insns);
3167 pop_obstacks ();
3168 }
3169 }
3170
3171 return 0;
3172 }
3173
3174 /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3175 which REGs in HT. */
3176
3177 static void
3178 compute_insns_for_mem (insns, last_insn, ht)
3179 rtx insns;
3180 rtx last_insn;
3181 struct hash_table *ht;
3182 {
3183 rtx insn;
3184 struct insns_for_mem_walk_info ifmwi;
3185 ifmwi.ht = ht;
3186
3187 for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3188 for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3189 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3190 {
3191 ifmwi.insn = insn;
3192 for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3193 }
3194 }
3195
3196 /* Helper function for purge_addressof called through for_each_rtx.
3197 Returns true iff the rtl is an ADDRESSOF. */
3198 static int
3199 is_addressof (rtl, data)
3200 rtx * rtl;
3201 void * data ATTRIBUTE_UNUSED;
3202 {
3203 return GET_CODE (* rtl) == ADDRESSOF;
3204 }
3205
3206 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3207 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3208 stack. */
3209
3210 void
3211 purge_addressof (insns)
3212 rtx insns;
3213 {
3214 rtx insn;
3215 struct hash_table ht;
3216
3217 /* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
3218 requires a fixup pass over the instruction stream to correct
3219 INSNs that depended on the REG being a REG, and not a MEM. But,
3220 these fixup passes are slow. Furthermore, most MEMs are not
3221 mentioned in very many instructions. So, we speed up the process
3222 by pre-calculating which REGs occur in which INSNs; that allows
3223 us to perform the fixup passes much more quickly. */
3224 hash_table_init (&ht,
3225 insns_for_mem_newfunc,
3226 insns_for_mem_hash,
3227 insns_for_mem_comp);
3228 compute_insns_for_mem (insns, NULL_RTX, &ht);
3229
3230 for (insn = insns; insn; insn = NEXT_INSN (insn))
3231 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3232 || GET_CODE (insn) == CALL_INSN)
3233 {
3234 if (! purge_addressof_1 (&PATTERN (insn), insn,
3235 asm_noperands (PATTERN (insn)) > 0, 0, &ht))
3236 /* If we could not replace the ADDRESSOFs in the insn,
3237 something is wrong. */
3238 abort ();
3239
3240 if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, &ht))
3241 {
3242 /* If we could not replace the ADDRESSOFs in the insn's notes,
3243 we can just remove the offending notes instead. */
3244 rtx note;
3245
3246 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3247 {
3248 /* If we find a REG_RETVAL note then the insn is a libcall.
3249 Such insns must have REG_EQUAL notes as well, in order
3250 for later passes of the compiler to work. So it is not
3251 safe to delete the notes here, and instead we abort. */
3252 if (REG_NOTE_KIND (note) == REG_RETVAL)
3253 abort ();
3254 if (for_each_rtx (& note, is_addressof, NULL))
3255 remove_note (insn, note);
3256 }
3257 }
3258 }
3259
3260 /* Clean up. */
3261 hash_table_free (&ht);
3262 purge_bitfield_addressof_replacements = 0;
3263 purge_addressof_replacements = 0;
3264 }
3265 \f
3266 /* Pass through the INSNS of function FNDECL and convert virtual register
3267 references to hard register references. */
3268
3269 void
3270 instantiate_virtual_regs (fndecl, insns)
3271 tree fndecl;
3272 rtx insns;
3273 {
3274 rtx insn;
3275 int i;
3276
3277 /* Compute the offsets to use for this function. */
3278 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3279 var_offset = STARTING_FRAME_OFFSET;
3280 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3281 out_arg_offset = STACK_POINTER_OFFSET;
3282 cfa_offset = ARG_POINTER_CFA_OFFSET;
3283
3284 /* Scan all variables and parameters of this function. For each that is
3285 in memory, instantiate all virtual registers if the result is a valid
3286 address. If not, we do it later. That will handle most uses of virtual
3287 regs on many machines. */
3288 instantiate_decls (fndecl, 1);
3289
3290 /* Initialize recognition, indicating that volatile is OK. */
3291 init_recog ();
3292
3293 /* Scan through all the insns, instantiating every virtual register still
3294 present. */
3295 for (insn = insns; insn; insn = NEXT_INSN (insn))
3296 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3297 || GET_CODE (insn) == CALL_INSN)
3298 {
3299 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3300 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3301 }
3302
3303 /* Instantiate the stack slots for the parm registers, for later use in
3304 addressof elimination. */
3305 for (i = 0; i < max_parm_reg; ++i)
3306 if (parm_reg_stack_loc[i])
3307 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3308
3309 /* Now instantiate the remaining register equivalences for debugging info.
3310 These will not be valid addresses. */
3311 instantiate_decls (fndecl, 0);
3312
3313 /* Indicate that, from now on, assign_stack_local should use
3314 frame_pointer_rtx. */
3315 virtuals_instantiated = 1;
3316 }
3317
3318 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3319 all virtual registers in their DECL_RTL's.
3320
3321 If VALID_ONLY, do this only if the resulting address is still valid.
3322 Otherwise, always do it. */
3323
3324 static void
3325 instantiate_decls (fndecl, valid_only)
3326 tree fndecl;
3327 int valid_only;
3328 {
3329 tree decl;
3330
3331 if (DECL_SAVED_INSNS (fndecl))
3332 /* When compiling an inline function, the obstack used for
3333 rtl allocation is the maybepermanent_obstack. Calling
3334 `resume_temporary_allocation' switches us back to that
3335 obstack while we process this function's parameters. */
3336 resume_temporary_allocation ();
3337
3338 /* Process all parameters of the function. */
3339 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3340 {
3341 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3342
3343 instantiate_decl (DECL_RTL (decl), size, valid_only);
3344
3345 /* If the parameter was promoted, then the incoming RTL mode may be
3346 larger than the declared type size. We must use the larger of
3347 the two sizes. */
3348 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3349 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3350 }
3351
3352 /* Now process all variables defined in the function or its subblocks. */
3353 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3354
3355 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3356 {
3357 /* Save all rtl allocated for this function by raising the
3358 high-water mark on the maybepermanent_obstack. */
3359 preserve_data ();
3360 /* All further rtl allocation is now done in the current_obstack. */
3361 rtl_in_current_obstack ();
3362 }
3363 }
3364
3365 /* Subroutine of instantiate_decls: Process all decls in the given
3366 BLOCK node and all its subblocks. */
3367
3368 static void
3369 instantiate_decls_1 (let, valid_only)
3370 tree let;
3371 int valid_only;
3372 {
3373 tree t;
3374
3375 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3376 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3377 valid_only);
3378
3379 /* Process all subblocks. */
3380 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3381 instantiate_decls_1 (t, valid_only);
3382 }
3383
3384 /* Subroutine of the preceding procedures: Given RTL representing a
3385 decl and the size of the object, do any instantiation required.
3386
3387 If VALID_ONLY is non-zero, it means that the RTL should only be
3388 changed if the new address is valid. */
3389
3390 static void
3391 instantiate_decl (x, size, valid_only)
3392 rtx x;
3393 int size;
3394 int valid_only;
3395 {
3396 enum machine_mode mode;
3397 rtx addr;
3398
3399 /* If this is not a MEM, no need to do anything. Similarly if the
3400 address is a constant or a register that is not a virtual register. */
3401
3402 if (x == 0 || GET_CODE (x) != MEM)
3403 return;
3404
3405 addr = XEXP (x, 0);
3406 if (CONSTANT_P (addr)
3407 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3408 || (GET_CODE (addr) == REG
3409 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3410 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3411 return;
3412
3413 /* If we should only do this if the address is valid, copy the address.
3414 We need to do this so we can undo any changes that might make the
3415 address invalid. This copy is unfortunate, but probably can't be
3416 avoided. */
3417
3418 if (valid_only)
3419 addr = copy_rtx (addr);
3420
3421 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3422
3423 if (valid_only)
3424 {
3425 /* Now verify that the resulting address is valid for every integer or
3426 floating-point mode up to and including SIZE bytes long. We do this
3427 since the object might be accessed in any mode and frame addresses
3428 are shared. */
3429
3430 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3431 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3432 mode = GET_MODE_WIDER_MODE (mode))
3433 if (! memory_address_p (mode, addr))
3434 return;
3435
3436 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3437 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3438 mode = GET_MODE_WIDER_MODE (mode))
3439 if (! memory_address_p (mode, addr))
3440 return;
3441 }
3442
3443 /* Put back the address now that we have updated it and we either know
3444 it is valid or we don't care whether it is valid. */
3445
3446 XEXP (x, 0) = addr;
3447 }
3448 \f
3449 /* Given a pointer to a piece of rtx and an optional pointer to the
3450 containing object, instantiate any virtual registers present in it.
3451
3452 If EXTRA_INSNS, we always do the replacement and generate
3453 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3454 is not valid.
3455
3456 Return 1 if we either had nothing to do or if we were able to do the
3457 needed replacement. Return 0 otherwise; we only return zero if
3458 EXTRA_INSNS is zero.
3459
3460 We first try some simple transformations to avoid the creation of extra
3461 pseudos. */
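     /* Illustrative example (the offset is invented): if var_offset is 16,
        then
          (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 4)))
        is rewritten in place to
          (mem:SI (plus:SI (reg:SI frame-pointer) (const_int 20)));
        when the result is a valid address this needs no new insns, since the
        PLUS case below simply folds the offset into the existing constant
        and validates the change.  */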
3462
3463 static int
3464 instantiate_virtual_regs_1 (loc, object, extra_insns)
3465 rtx *loc;
3466 rtx object;
3467 int extra_insns;
3468 {
3469 rtx x;
3470 RTX_CODE code;
3471 rtx new = 0;
3472 HOST_WIDE_INT offset = 0;
3473 rtx temp;
3474 rtx seq;
3475 int i, j;
3476 const char *fmt;
3477
3478 /* Re-start here to avoid recursion in common cases. */
3479 restart:
3480
3481 x = *loc;
3482 if (x == 0)
3483 return 1;
3484
3485 code = GET_CODE (x);
3486
3487 /* Check for some special cases. */
3488 switch (code)
3489 {
3490 case CONST_INT:
3491 case CONST_DOUBLE:
3492 case CONST:
3493 case SYMBOL_REF:
3494 case CODE_LABEL:
3495 case PC:
3496 case CC0:
3497 case ASM_INPUT:
3498 case ADDR_VEC:
3499 case ADDR_DIFF_VEC:
3500 case RETURN:
3501 return 1;
3502
3503 case SET:
3504 /* We are allowed to set the virtual registers. This means that
3505 the actual register should receive the source minus the
3506 appropriate offset. This is used, for example, in the handling
3507 of non-local gotos. */
3508 if (SET_DEST (x) == virtual_incoming_args_rtx)
3509 new = arg_pointer_rtx, offset = - in_arg_offset;
3510 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3511 new = frame_pointer_rtx, offset = - var_offset;
3512 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3513 new = stack_pointer_rtx, offset = - dynamic_offset;
3514 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3515 new = stack_pointer_rtx, offset = - out_arg_offset;
3516 else if (SET_DEST (x) == virtual_cfa_rtx)
3517 new = arg_pointer_rtx, offset = - cfa_offset;
3518
3519 if (new)
3520 {
3521 rtx src = SET_SRC (x);
3522
3523 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3524
3525 /* The only valid sources here are PLUS or REG. Just do
3526 the simplest possible thing to handle them. */
3527 if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3528 abort ();
3529
3530 start_sequence ();
3531 if (GET_CODE (src) != REG)
3532 temp = force_operand (src, NULL_RTX);
3533 else
3534 temp = src;
3535 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3536 seq = get_insns ();
3537 end_sequence ();
3538
3539 emit_insns_before (seq, object);
3540 SET_DEST (x) = new;
3541
3542 if (! validate_change (object, &SET_SRC (x), temp, 0)
3543 || ! extra_insns)
3544 abort ();
3545
3546 return 1;
3547 }
3548
3549 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3550 loc = &SET_SRC (x);
3551 goto restart;
3552
3553 case PLUS:
3554 /* Handle special case of virtual register plus constant. */
3555 if (CONSTANT_P (XEXP (x, 1)))
3556 {
3557 rtx old, new_offset;
3558
3559 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3560 if (GET_CODE (XEXP (x, 0)) == PLUS)
3561 {
3562 rtx inner = XEXP (XEXP (x, 0), 0);
3563
3564 if (inner == virtual_incoming_args_rtx)
3565 new = arg_pointer_rtx, offset = in_arg_offset;
3566 else if (inner == virtual_stack_vars_rtx)
3567 new = frame_pointer_rtx, offset = var_offset;
3568 else if (inner == virtual_stack_dynamic_rtx)
3569 new = stack_pointer_rtx, offset = dynamic_offset;
3570 else if (inner == virtual_outgoing_args_rtx)
3571 new = stack_pointer_rtx, offset = out_arg_offset;
3572 else if (inner == virtual_cfa_rtx)
3573 new = arg_pointer_rtx, offset = cfa_offset;
3574 else
3575 {
3576 loc = &XEXP (x, 0);
3577 goto restart;
3578 }
3579
3580 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3581 extra_insns);
3582 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3583 }
3584
3585 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3586 new = arg_pointer_rtx, offset = in_arg_offset;
3587 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3588 new = frame_pointer_rtx, offset = var_offset;
3589 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3590 new = stack_pointer_rtx, offset = dynamic_offset;
3591 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3592 new = stack_pointer_rtx, offset = out_arg_offset;
3593 else if (XEXP (x, 0) == virtual_cfa_rtx)
3594 new = arg_pointer_rtx, offset = cfa_offset;
3595 else
3596 {
3597 /* We know the second operand is a constant. Unless the
3598 first operand is a REG (which has already been checked),
3599 it needs to be checked. */
3600 if (GET_CODE (XEXP (x, 0)) != REG)
3601 {
3602 loc = &XEXP (x, 0);
3603 goto restart;
3604 }
3605 return 1;
3606 }
3607
3608 new_offset = plus_constant (XEXP (x, 1), offset);
3609
3610 /* If the new constant is zero, try to replace the sum with just
3611 the register. */
3612 if (new_offset == const0_rtx
3613 && validate_change (object, loc, new, 0))
3614 return 1;
3615
3616 /* Next try to replace the register and new offset.
3617 There are two changes to validate here and we can't assume that
3618 when the old offset equals the new one, just changing the register
3619 will yield a valid insn. In the interests of a little efficiency,
3620 however, we only call validate_change once (we don't queue up the
3621 changes and then call apply_change_group). */
3622
3623 old = XEXP (x, 0);
3624 if (offset == 0
3625 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3626 : (XEXP (x, 0) = new,
3627 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3628 {
3629 if (! extra_insns)
3630 {
3631 XEXP (x, 0) = old;
3632 return 0;
3633 }
3634
3635 /* Otherwise copy the new constant into a register and replace
3636 constant with that register. */
3637 temp = gen_reg_rtx (Pmode);
3638 XEXP (x, 0) = new;
3639 if (validate_change (object, &XEXP (x, 1), temp, 0))
3640 emit_insn_before (gen_move_insn (temp, new_offset), object);
3641 else
3642 {
3643 /* If that didn't work, replace this expression with a
3644 register containing the sum. */
3645
3646 XEXP (x, 0) = old;
3647 new = gen_rtx_PLUS (Pmode, new, new_offset);
3648
3649 start_sequence ();
3650 temp = force_operand (new, NULL_RTX);
3651 seq = get_insns ();
3652 end_sequence ();
3653
3654 emit_insns_before (seq, object);
3655 if (! validate_change (object, loc, temp, 0)
3656 && ! validate_replace_rtx (x, temp, object))
3657 abort ();
3658 }
3659 }
3660
3661 return 1;
3662 }
3663
3664 /* Fall through to generic two-operand expression case. */
3665 case EXPR_LIST:
3666 case CALL:
3667 case COMPARE:
3668 case MINUS:
3669 case MULT:
3670 case DIV: case UDIV:
3671 case MOD: case UMOD:
3672 case AND: case IOR: case XOR:
3673 case ROTATERT: case ROTATE:
3674 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3675 case NE: case EQ:
3676 case GE: case GT: case GEU: case GTU:
3677 case LE: case LT: case LEU: case LTU:
3678 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3679 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3680 loc = &XEXP (x, 0);
3681 goto restart;
3682
3683 case MEM:
3684 /* Most cases of MEM that convert to valid addresses have already been
3685 handled by our scan of decls. The only special handling we
3686 need here is to make a copy of the rtx to ensure it isn't being
3687 shared if we have to change it to a pseudo.
3688
3689 If the rtx is a simple reference to an address via a virtual register,
3690 it can potentially be shared. In such cases, first try to make it
3691 a valid address, which can also be shared. Otherwise, copy it and
3692 proceed normally.
3693
3694 First check for common cases that need no processing. These are
3695 usually due to instantiation already being done on a previous instance
3696 of a shared rtx. */
3697
3698 temp = XEXP (x, 0);
3699 if (CONSTANT_ADDRESS_P (temp)
3700 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3701 || temp == arg_pointer_rtx
3702 #endif
3703 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3704 || temp == hard_frame_pointer_rtx
3705 #endif
3706 || temp == frame_pointer_rtx)
3707 return 1;
3708
3709 if (GET_CODE (temp) == PLUS
3710 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3711 && (XEXP (temp, 0) == frame_pointer_rtx
3712 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3713 || XEXP (temp, 0) == hard_frame_pointer_rtx
3714 #endif
3715 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3716 || XEXP (temp, 0) == arg_pointer_rtx
3717 #endif
3718 ))
3719 return 1;
3720
3721 if (temp == virtual_stack_vars_rtx
3722 || temp == virtual_incoming_args_rtx
3723 || (GET_CODE (temp) == PLUS
3724 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3725 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3726 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3727 {
3728 /* This MEM may be shared. If the substitution can be done without
3729 the need to generate new pseudos, we want to do it in place
3730 so all copies of the shared rtx benefit. The call below will
3731 only make substitutions if the resulting address is still
3732 valid.
3733
3734 Note that we cannot pass X as the object in the recursive call
3735 since the insn being processed may not allow all valid
3736 addresses. However, if we were not passed an object, we can
3737 only modify X without copying it if X will have a valid
3738 address.
3739
3740 ??? Also note that this can still lose if OBJECT is an insn that
3741 has fewer restrictions on an address than some other insn.
3742 In that case, we will modify the shared address. This case
3743 doesn't seem very likely, though. One case where this could
3744 happen is in the case of a USE or CLOBBER reference, but we
3745 take care of that below. */
3746
3747 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3748 object ? object : x, 0))
3749 return 1;
3750
3751 /* Otherwise make a copy and process that copy. We copy the entire
3752 RTL expression since it might be a PLUS which could also be
3753 shared. */
3754 *loc = x = copy_rtx (x);
3755 }
3756
3757 /* Fall through to generic unary operation case. */
3758 case SUBREG:
3759 case STRICT_LOW_PART:
3760 case NEG: case NOT:
3761 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3762 case SIGN_EXTEND: case ZERO_EXTEND:
3763 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3764 case FLOAT: case FIX:
3765 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3766 case ABS:
3767 case SQRT:
3768 case FFS:
3769 /* These cases either have just one operand or we know that we need not
3770 check the rest of the operands. */
3771 loc = &XEXP (x, 0);
3772 goto restart;
3773
3774 case USE:
3775 case CLOBBER:
3776 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3777 go ahead and make the invalid one, but do it to a copy. For a REG,
3778 just make the recursive call, since there's no chance of a problem. */
3779
3780 if ((GET_CODE (XEXP (x, 0)) == MEM
3781 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3782 0))
3783 || (GET_CODE (XEXP (x, 0)) == REG
3784 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3785 return 1;
3786
3787 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3788 loc = &XEXP (x, 0);
3789 goto restart;
3790
3791 case REG:
3792 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3793 in front of this insn and substitute the temporary. */
3794 if (x == virtual_incoming_args_rtx)
3795 new = arg_pointer_rtx, offset = in_arg_offset;
3796 else if (x == virtual_stack_vars_rtx)
3797 new = frame_pointer_rtx, offset = var_offset;
3798 else if (x == virtual_stack_dynamic_rtx)
3799 new = stack_pointer_rtx, offset = dynamic_offset;
3800 else if (x == virtual_outgoing_args_rtx)
3801 new = stack_pointer_rtx, offset = out_arg_offset;
3802 else if (x == virtual_cfa_rtx)
3803 new = arg_pointer_rtx, offset = cfa_offset;
3804
3805 if (new)
3806 {
3807 temp = plus_constant (new, offset);
3808 if (!validate_change (object, loc, temp, 0))
3809 {
3810 if (! extra_insns)
3811 return 0;
3812
3813 start_sequence ();
3814 temp = force_operand (temp, NULL_RTX);
3815 seq = get_insns ();
3816 end_sequence ();
3817
3818 emit_insns_before (seq, object);
3819 if (! validate_change (object, loc, temp, 0)
3820 && ! validate_replace_rtx (x, temp, object))
3821 abort ();
3822 }
3823 }
3824
3825 return 1;
3826
3827 case ADDRESSOF:
3828 if (GET_CODE (XEXP (x, 0)) == REG)
3829 return 1;
3830
3831 else if (GET_CODE (XEXP (x, 0)) == MEM)
3832 {
3833 /* If we have a (addressof (mem ..)), do any instantiation inside
3834 since we know we'll be making the inside valid when we finally
3835 remove the ADDRESSOF. */
3836 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3837 return 1;
3838 }
3839 break;
3840
3841 default:
3842 break;
3843 }
3844
3845 /* Scan all subexpressions. */
3846 fmt = GET_RTX_FORMAT (code);
3847 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3848 if (*fmt == 'e')
3849 {
3850 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3851 return 0;
3852 }
3853 else if (*fmt == 'E')
3854 for (j = 0; j < XVECLEN (x, i); j++)
3855 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3856 extra_insns))
3857 return 0;
3858
3859 return 1;
3860 }
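/* Illustrative use of the routine above (a minimal sketch): rewrite
   every virtual register occurring in an insn's pattern, emitting
   whatever address arithmetic is needed before the insn:

	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);  */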
3861 \f
3862 /* Optimization: assuming this function does not receive nonlocal gotos,
3863 delete the handlers for such, as well as the insns to establish
3864 and disestablish them. */
3865
3866 static void
3867 delete_handlers ()
3868 {
3869 rtx insn;
3870 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3871 {
3872 /* Delete the handler by turning off the flag that would
3873 prevent jump_optimize from deleting it.
3874 Also permit deletion of the nonlocal labels themselves
3875 if nothing local refers to them. */
3876 if (GET_CODE (insn) == CODE_LABEL)
3877 {
3878 tree t, last_t;
3879
3880 LABEL_PRESERVE_P (insn) = 0;
3881
3882 /* Remove it from the nonlocal_label list, to avoid confusing
3883 flow. */
3884 for (t = nonlocal_labels, last_t = 0; t;
3885 last_t = t, t = TREE_CHAIN (t))
3886 if (DECL_RTL (TREE_VALUE (t)) == insn)
3887 break;
3888 if (t)
3889 {
3890 if (! last_t)
3891 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3892 else
3893 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3894 }
3895 }
3896 if (GET_CODE (insn) == INSN)
3897 {
3898 int can_delete = 0;
3899 rtx t;
3900 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3901 if (reg_mentioned_p (t, PATTERN (insn)))
3902 {
3903 can_delete = 1;
3904 break;
3905 }
3906 if (can_delete
3907 || (nonlocal_goto_stack_level != 0
3908 && reg_mentioned_p (nonlocal_goto_stack_level,
3909 PATTERN (insn))))
3910 delete_insn (insn);
3911 }
3912 }
3913 }
3914 \f
3915 int
3916 max_parm_reg_num ()
3917 {
3918 return max_parm_reg;
3919 }
3920
3921 /* Return the first insn following those generated by `assign_parms'. */
3922
3923 rtx
3924 get_first_nonparm_insn ()
3925 {
3926 if (last_parm_insn)
3927 return NEXT_INSN (last_parm_insn);
3928 return get_insns ();
3929 }
3930
3931 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3932 Crash if there is none. */
3933
3934 rtx
3935 get_first_block_beg ()
3936 {
3937 register rtx searcher;
3938 register rtx insn = get_first_nonparm_insn ();
3939
3940 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3941 if (GET_CODE (searcher) == NOTE
3942 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3943 return searcher;
3944
3945 abort (); /* Invalid call to this function. (See comments above.) */
3946 return NULL_RTX;
3947 }
3948
3949 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3950 This means a type for which function calls must pass an address to the
3951 function or get an address back from the function.
3952 EXP may be a type node or an expression (whose type is tested). */
3953
3954 int
3955 aggregate_value_p (exp)
3956 tree exp;
3957 {
3958 int i, regno, nregs;
3959 rtx reg;
3960 tree type;
3961 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3962 type = exp;
3963 else
3964 type = TREE_TYPE (exp);
3965
3966 if (RETURN_IN_MEMORY (type))
3967 return 1;
3968 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3969 and thus can't be returned in registers. */
3970 if (TREE_ADDRESSABLE (type))
3971 return 1;
3972 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3973 return 1;
3974 /* Make sure we have suitable call-clobbered regs to return
3975 the value in; if not, we must return it in memory. */
3976 reg = hard_function_value (type, 0, 0);
3977
3978 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3979 it is OK. */
3980 if (GET_CODE (reg) != REG)
3981 return 0;
3982
3983 regno = REGNO (reg);
3984 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3985 for (i = 0; i < nregs; i++)
3986 if (! call_used_regs[regno + i])
3987 return 1;
3988 return 0;
3989 }
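/* Example use (a sketch): this predicate decides, as in assign_parms
   below, whether a function's value must be returned through a hidden
   pointer rather than in registers:

	if (aggregate_value_p (DECL_RESULT (fndecl)))
	  ... arrange for a structure-return address ...  */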
3990 \f
3991 /* Assign RTL expressions to the function's parameters.
3992 This may involve copying them into registers and using
3993 those registers as the RTL for them. */
3994
3995 void
3996 assign_parms (fndecl)
3997 tree fndecl;
3998 {
3999 register tree parm;
4000 register rtx entry_parm = 0;
4001 register rtx stack_parm = 0;
4002 CUMULATIVE_ARGS args_so_far;
4003 enum machine_mode promoted_mode, passed_mode;
4004 enum machine_mode nominal_mode, promoted_nominal_mode;
4005 int unsignedp;
4006 /* Total space needed so far for args on the stack,
4007 given as a constant and a tree-expression. */
4008 struct args_size stack_args_size;
4009 tree fntype = TREE_TYPE (fndecl);
4010 tree fnargs = DECL_ARGUMENTS (fndecl);
4011 /* This is used for the arg pointer when referring to stack args. */
4012 rtx internal_arg_pointer;
4013 /* This is a dummy PARM_DECL that we used for the function result if
4014 the function returns a structure. */
4015 tree function_result_decl = 0;
4016 #ifdef SETUP_INCOMING_VARARGS
4017 int varargs_setup = 0;
4018 #endif
4019 rtx conversion_insns = 0;
4020 struct args_size alignment_pad;
4021
4022 /* Nonzero if the last arg is named `__builtin_va_alist',
4023 which is used on some machines for old-fashioned non-ANSI varargs.h;
4024 this should be stuck onto the stack as if it had arrived there. */
4025 int hide_last_arg
4026 = (current_function_varargs
4027 && fnargs
4028 && (parm = tree_last (fnargs)) != 0
4029 && DECL_NAME (parm)
4030 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
4031 "__builtin_va_alist")));
4032
4033 /* Nonzero if function takes extra anonymous args.
4034 This means the last named arg must be on the stack
4035 right before the anonymous ones. */
4036 int stdarg
4037 = (TYPE_ARG_TYPES (fntype) != 0
4038 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4039 != void_type_node));
4040
4041 current_function_stdarg = stdarg;
4042
4043 /* If the reg that the virtual arg pointer will be translated into is
4044 not a fixed reg or is the stack pointer, make a copy of the virtual
4045 arg pointer, and address parms via the copy. The frame pointer is
4046 considered fixed even though it is not marked as such.
4047
4048 The second time through, simply use ap to avoid generating rtx. */
4049
4050 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4051 || ! (fixed_regs[ARG_POINTER_REGNUM]
4052 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4053 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4054 else
4055 internal_arg_pointer = virtual_incoming_args_rtx;
4056 current_function_internal_arg_pointer = internal_arg_pointer;
4057
4058 stack_args_size.constant = 0;
4059 stack_args_size.var = 0;
4060
4061 /* If struct value address is treated as the first argument, make it so. */
4062 if (aggregate_value_p (DECL_RESULT (fndecl))
4063 && ! current_function_returns_pcc_struct
4064 && struct_value_incoming_rtx == 0)
4065 {
4066 tree type = build_pointer_type (TREE_TYPE (fntype));
4067
4068 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4069
4070 DECL_ARG_TYPE (function_result_decl) = type;
4071 TREE_CHAIN (function_result_decl) = fnargs;
4072 fnargs = function_result_decl;
4073 }
4074
4075 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4076 parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
4077
4078 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4079 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4080 #else
4081 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4082 #endif
4083
4084 /* We haven't yet found an argument that we must push and pretend the
4085 caller did. */
4086 current_function_pretend_args_size = 0;
4087
4088 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4089 {
4090 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
4091 struct args_size stack_offset;
4092 struct args_size arg_size;
4093 int passed_pointer = 0;
4094 int did_conversion = 0;
4095 tree passed_type = DECL_ARG_TYPE (parm);
4096 tree nominal_type = TREE_TYPE (parm);
4097 int pretend_named;
4098
4099 /* Set LAST_NAMED if this is the last named arg before some
4100 anonymous args. */
4101 int last_named = ((TREE_CHAIN (parm) == 0
4102 || DECL_NAME (TREE_CHAIN (parm)) == 0)
4103 && (stdarg || current_function_varargs));
4104 /* Set NAMED_ARG if this arg should be treated as a named arg. For
4105 most machines, if this is a varargs/stdarg function, then we treat
4106 the last named arg as if it were anonymous too. */
4107 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4108
4109 if (TREE_TYPE (parm) == error_mark_node
4110 /* This can happen after weird syntax errors
4111 or if an enum type is defined among the parms. */
4112 || TREE_CODE (parm) != PARM_DECL
4113 || passed_type == NULL)
4114 {
4115 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
4116 = gen_rtx_MEM (BLKmode, const0_rtx);
4117 TREE_USED (parm) = 1;
4118 continue;
4119 }
4120
4121 /* For a varargs.h function, save info about regs and stack space
4122 used by the individual args, not including the va_alist arg. */
4123 if (hide_last_arg && last_named)
4124 current_function_args_info = args_so_far;
4125
4126 /* Find mode of arg as it is passed, and mode of arg
4127 as it should be during execution of this function. */
4128 passed_mode = TYPE_MODE (passed_type);
4129 nominal_mode = TYPE_MODE (nominal_type);
4130
4131 /* If the parm's mode is VOID, its value doesn't matter,
4132 so avoid the usual things like emit_move_insn that could crash.
4133 if (nominal_mode == VOIDmode)
4134 {
4135 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4136 continue;
4137 }
4138
4139 /* If the parm is to be passed as a transparent union, use the
4140 type of the first field for the tests below. We have already
4141 verified that the modes are the same. */
4142 if (DECL_TRANSPARENT_UNION (parm)
4143 || TYPE_TRANSPARENT_UNION (passed_type))
4144 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4145
4146 /* See if this arg was passed by invisible reference. It is if
4147 it is an object whose size depends on the contents of the
4148 object itself or if the machine requires these objects be passed
4149 that way. */
4150
4151 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4152 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4153 || TREE_ADDRESSABLE (passed_type)
4154 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4155 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4156 passed_type, named_arg)
4157 #endif
4158 )
4159 {
4160 passed_type = nominal_type = build_pointer_type (passed_type);
4161 passed_pointer = 1;
4162 passed_mode = nominal_mode = Pmode;
4163 }
4164
4165 promoted_mode = passed_mode;
4166
4167 #ifdef PROMOTE_FUNCTION_ARGS
4168 /* Compute the mode to which the arg is actually extended. */
4169 unsignedp = TREE_UNSIGNED (passed_type);
4170 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4171 #endif
4172
4173 /* Let machine desc say which reg (if any) the parm arrives in.
4174 0 means it arrives on the stack. */
4175 #ifdef FUNCTION_INCOMING_ARG
4176 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4177 passed_type, named_arg);
4178 #else
4179 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4180 passed_type, named_arg);
4181 #endif
4182
4183 if (entry_parm == 0)
4184 promoted_mode = passed_mode;
4185
4186 #ifdef SETUP_INCOMING_VARARGS
4187 /* If this is the last named parameter, do any required setup for
4188 varargs or stdargs. We need to know about the case of this being an
4189 addressable type, in which case we skip the registers it
4190 would have arrived in.
4191
4192 For stdargs, LAST_NAMED will be set for two parameters, the one that
4193 is actually the last named, and the dummy parameter. We only
4194 want to do this action once.
4195
4196 Also, indicate when RTL generation is to be suppressed. */
4197 if (last_named && !varargs_setup)
4198 {
4199 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4200 current_function_pretend_args_size, 0);
4201 varargs_setup = 1;
4202 }
4203 #endif
4204
4205 /* Determine parm's home in the stack,
4206 in case it arrives in the stack or we should pretend it did.
4207
4208 Compute the stack position and rtx where the argument arrives
4209 and its size.
4210
4211 There is one complexity here: If this was a parameter that would
4212 have been passed in registers, but wasn't only because it is
4213 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4214 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4215 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4216 0 as it was the previous time. */
4217
4218 pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4219 locate_and_pad_parm (promoted_mode, passed_type,
4220 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4221 1,
4222 #else
4223 #ifdef FUNCTION_INCOMING_ARG
4224 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4225 passed_type,
4226 pretend_named) != 0,
4227 #else
4228 FUNCTION_ARG (args_so_far, promoted_mode,
4229 passed_type,
4230 pretend_named) != 0,
4231 #endif
4232 #endif
4233 fndecl, &stack_args_size, &stack_offset, &arg_size,
4234 &alignment_pad);
4235
4236 {
4237 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4238
4239 if (offset_rtx == const0_rtx)
4240 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4241 else
4242 stack_parm = gen_rtx_MEM (promoted_mode,
4243 gen_rtx_PLUS (Pmode,
4244 internal_arg_pointer,
4245 offset_rtx));
4246
4247 /* If this is a memory ref that contains aggregate components,
4248 mark it as such for cse and loop optimize. Likewise if it
4249 is readonly. */
4250 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4251 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4252 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4253 }
4254
4255 /* If this parameter was passed both in registers and in the stack,
4256 use the copy on the stack. */
4257 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4258 entry_parm = 0;
4259
4260 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4261 /* If this parm was passed part in regs and part in memory,
4262 pretend it arrived entirely in memory
4263 by pushing the register-part onto the stack.
4264
4265 In the special case of a DImode or DFmode that is split,
4266 we could put it together in a pseudoreg directly,
4267 but for now that's not worth bothering with. */
4268
4269 if (entry_parm)
4270 {
4271 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4272 passed_type, named_arg);
4273
4274 if (nregs > 0)
4275 {
4276 current_function_pretend_args_size
4277 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4278 / (PARM_BOUNDARY / BITS_PER_UNIT)
4279 * (PARM_BOUNDARY / BITS_PER_UNIT));
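	      /* For example (a sketch, assuming UNITS_PER_WORD == 4 and
		 PARM_BOUNDARY == 64): two register words make 8 bytes,
		 which already sits on the 8-byte parm boundary, so the
		 pretend size is ((8 + 8 - 1) / 8) * 8 == 8 bytes.  */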
4280
4281 /* Handle calls that pass values in multiple non-contiguous
4282 locations. The Irix 6 ABI has examples of this. */
4283 if (GET_CODE (entry_parm) == PARALLEL)
4284 emit_group_store (validize_mem (stack_parm), entry_parm,
4285 int_size_in_bytes (TREE_TYPE (parm)),
4286 (TYPE_ALIGN (TREE_TYPE (parm))
4287 / BITS_PER_UNIT));
4288 else
4289 move_block_from_reg (REGNO (entry_parm),
4290 validize_mem (stack_parm), nregs,
4291 int_size_in_bytes (TREE_TYPE (parm)));
4292
4293 entry_parm = stack_parm;
4294 }
4295 }
4296 #endif
4297
4298 /* If we didn't decide this parm came in a register,
4299 by default it came on the stack. */
4300 if (entry_parm == 0)
4301 entry_parm = stack_parm;
4302
4303 /* Record permanently how this parm was passed. */
4304 DECL_INCOMING_RTL (parm) = entry_parm;
4305
4306 /* If there is actually space on the stack for this parm,
4307 count it in stack_args_size; otherwise set stack_parm to 0
4308 to indicate there is no preallocated stack slot for the parm. */
4309
4310 if (entry_parm == stack_parm
4311 || (GET_CODE (entry_parm) == PARALLEL
4312 && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4313 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4314 /* On some machines, even if a parm value arrives in a register
4315 there is still an (uninitialized) stack slot allocated for it.
4316
4317 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4318 whether this parameter already has a stack slot allocated,
4319 because an arg block exists only if current_function_args_size
4320 is larger than some threshold, and we haven't calculated that
4321 yet. So, for now, we just assume that stack slots never exist
4322 in this case. */
4323 || REG_PARM_STACK_SPACE (fndecl) > 0
4324 #endif
4325 )
4326 {
4327 stack_args_size.constant += arg_size.constant;
4328 if (arg_size.var)
4329 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4330 }
4331 else
4332 /* No stack slot was pushed for this parm. */
4333 stack_parm = 0;
4334
4335 /* Update info on where next arg arrives in registers. */
4336
4337 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4338 passed_type, named_arg);
4339
4340 /* If we can't trust the parm stack slot to be aligned enough
4341 for its ultimate type, don't use that slot after entry.
4342 We'll make another stack slot, if we need one. */
4343 {
4344 int thisparm_boundary
4345 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4346
4347 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4348 stack_parm = 0;
4349 }
4350
4351 /* If parm was passed in memory, and we need to convert it on entry,
4352 don't store it back in that same slot. */
4353 if (entry_parm != 0
4354 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4355 stack_parm = 0;
4356
4357 #if 0
4358 /* Now adjust STACK_PARM to the mode and precise location
4359 where this parameter should live during execution,
4360 if we discover that it must live in the stack during execution.
4361 To make debuggers happier on big-endian machines, we store
4362 the value in the last bytes of the space available. */
4363
4364 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4365 && stack_parm != 0)
4366 {
4367 rtx offset_rtx;
4368
4369 if (BYTES_BIG_ENDIAN
4370 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4371 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4372 - GET_MODE_SIZE (nominal_mode));
4373
4374 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4375 if (offset_rtx == const0_rtx)
4376 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4377 else
4378 stack_parm = gen_rtx_MEM (nominal_mode,
4379 gen_rtx_PLUS (Pmode,
4380 internal_arg_pointer,
4381 offset_rtx));
4382
4383 /* If this is a memory ref that contains aggregate components,
4384 mark it as such for cse and loop optimize. */
4385 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4386 }
4387 #endif /* 0 */
4388
4389 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4390 in the mode in which it arrives.
4391 STACK_PARM is an RTX for a stack slot where the parameter can live
4392 during the function (in case we want to put it there).
4393 STACK_PARM is 0 if no stack slot was pushed for it.
4394
4395 Now output code if necessary to convert ENTRY_PARM to
4396 the type in which this function declares it,
4397 and store that result in an appropriate place,
4398 which may be a pseudo reg, may be STACK_PARM,
4399 or may be a local stack slot if STACK_PARM is 0.
4400
4401 Set DECL_RTL to that place. */
4402
4403 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4404 {
4405 /* If a BLKmode arrives in registers, copy it to a stack slot.
4406 Handle calls that pass values in multiple non-contiguous
4407 locations. The Irix 6 ABI has examples of this. */
4408 if (GET_CODE (entry_parm) == REG
4409 || GET_CODE (entry_parm) == PARALLEL)
4410 {
4411 int size_stored
4412 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4413 UNITS_PER_WORD);
4414
4415 /* Note that we will be storing an integral number of words.
4416 So we have to be careful to ensure that we allocate an
4417 integral number of words. We do this below in the call to
4418 assign_stack_local if space was not allocated in the argument
4419 list. If it was, this will not work if PARM_BOUNDARY is not
4420 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4421 if it becomes a problem. */
4422
4423 if (stack_parm == 0)
4424 {
4425 stack_parm
4426 = assign_stack_local (GET_MODE (entry_parm),
4427 size_stored, 0);
4428
4429 /* If this is a memory ref that contains aggregate
4430 components, mark it as such for cse and loop optimize. */
4431 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4432 }
4433
4434 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4435 abort ();
4436
4437 if (TREE_READONLY (parm))
4438 RTX_UNCHANGING_P (stack_parm) = 1;
4439
4440 /* Handle calls that pass values in multiple non-contiguous
4441 locations. The Irix 6 ABI has examples of this. */
4442 if (GET_CODE (entry_parm) == PARALLEL)
4443 emit_group_store (validize_mem (stack_parm), entry_parm,
4444 int_size_in_bytes (TREE_TYPE (parm)),
4445 (TYPE_ALIGN (TREE_TYPE (parm))
4446 / BITS_PER_UNIT));
4447 else
4448 move_block_from_reg (REGNO (entry_parm),
4449 validize_mem (stack_parm),
4450 size_stored / UNITS_PER_WORD,
4451 int_size_in_bytes (TREE_TYPE (parm)));
4452 }
4453 DECL_RTL (parm) = stack_parm;
4454 }
4455 else if (! ((! optimize
4456 && ! DECL_REGISTER (parm)
4457 && ! DECL_INLINE (fndecl))
4458 /* layout_decl may set this. */
4459 || TREE_ADDRESSABLE (parm)
4460 || TREE_SIDE_EFFECTS (parm)
4461 /* If -ffloat-store specified, don't put explicit
4462 float variables into registers. */
4463 || (flag_float_store
4464 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4465 /* Always assign pseudo to structure return or item passed
4466 by invisible reference. */
4467 || passed_pointer || parm == function_result_decl)
4468 {
4469 /* Store the parm in a pseudoregister during the function, but we
4470 may need to do it in a wider mode. */
4471
4472 register rtx parmreg;
4473 int regno, regnoi = 0, regnor = 0;
4474
4475 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4476
4477 promoted_nominal_mode
4478 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4479
4480 parmreg = gen_reg_rtx (promoted_nominal_mode);
4481 mark_user_reg (parmreg);
4482
4483 /* If this was an item that we received a pointer to, set DECL_RTL
4484 appropriately. */
4485 if (passed_pointer)
4486 {
4487 DECL_RTL (parm)
4488 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4489 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4490 }
4491 else
4492 DECL_RTL (parm) = parmreg;
4493
4494 /* Copy the value into the register. */
4495 if (nominal_mode != passed_mode
4496 || promoted_nominal_mode != promoted_mode)
4497 {
4498 int save_tree_used;
4499 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4500 mode, by the caller. We now have to convert it to
4501 NOMINAL_MODE, if different. However, PARMREG may be in
4502 a different mode than NOMINAL_MODE if it is being stored
4503 promoted.
4504
4505 If ENTRY_PARM is a hard register, it might be in a register
4506 not valid for operating in its mode (e.g., an odd-numbered
4507 register for a DFmode). In that case, moves are the only
4508 thing valid, so we can't do a convert from there. This
4509 occurs when the calling sequence allows such misaligned
4510 usages.
4511
4512 In addition, the conversion may involve a call, which could
4513 clobber parameters which haven't been copied to pseudo
4514 registers yet. Therefore, we must first copy the parm to
4515 a pseudo reg here, and save the conversion until after all
4516 parameters have been moved. */
4517
4518 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4519
4520 emit_move_insn (tempreg, validize_mem (entry_parm));
4521
4522 push_to_sequence (conversion_insns);
4523 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4524
4525 /* TREE_USED gets set erroneously during expand_assignment. */
4526 save_tree_used = TREE_USED (parm);
4527 expand_assignment (parm,
4528 make_tree (nominal_type, tempreg), 0, 0);
4529 TREE_USED (parm) = save_tree_used;
4530 conversion_insns = get_insns ();
4531 did_conversion = 1;
4532 end_sequence ();
4533 }
4534 else
4535 emit_move_insn (parmreg, validize_mem (entry_parm));
4536
4537 /* If we were passed a pointer but the actual value
4538 can safely live in a register, put it in one. */
4539 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4540 && ! ((! optimize
4541 && ! DECL_REGISTER (parm)
4542 && ! DECL_INLINE (fndecl))
4543 /* layout_decl may set this. */
4544 || TREE_ADDRESSABLE (parm)
4545 || TREE_SIDE_EFFECTS (parm)
4546 /* If -ffloat-store specified, don't put explicit
4547 float variables into registers. */
4548 || (flag_float_store
4549 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4550 {
4551 /* We can't use nominal_mode, because it will have been set to
4552 Pmode above. We must use the actual mode of the parm. */
4553 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4554 mark_user_reg (parmreg);
4555 emit_move_insn (parmreg, DECL_RTL (parm));
4556 DECL_RTL (parm) = parmreg;
4557 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4558 now the parm. */
4559 stack_parm = 0;
4560 }
4561 #ifdef FUNCTION_ARG_CALLEE_COPIES
4562 /* If we are passed an arg by reference and it is our responsibility
4563 to make a copy, do it now.
4564 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4565 original argument, so we must recreate them in the call to
4566 FUNCTION_ARG_CALLEE_COPIES. */
4567 /* ??? Later add code to handle the case that if the argument isn't
4568 modified, don't do the copy. */
4569
4570 else if (passed_pointer
4571 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4572 TYPE_MODE (DECL_ARG_TYPE (parm)),
4573 DECL_ARG_TYPE (parm),
4574 named_arg)
4575 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4576 {
4577 rtx copy;
4578 tree type = DECL_ARG_TYPE (parm);
4579
4580 /* This sequence may involve a library call perhaps clobbering
4581 registers that haven't been copied to pseudos yet. */
4582
4583 push_to_sequence (conversion_insns);
4584
4585 if (TYPE_SIZE (type) == 0
4586 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4587 /* This is a variable sized object. */
4588 copy = gen_rtx_MEM (BLKmode,
4589 allocate_dynamic_stack_space
4590 (expr_size (parm), NULL_RTX,
4591 TYPE_ALIGN (type)));
4592 else
4593 copy = assign_stack_temp (TYPE_MODE (type),
4594 int_size_in_bytes (type), 1);
4595 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4596 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4597
4598 store_expr (parm, copy, 0);
4599 emit_move_insn (parmreg, XEXP (copy, 0));
4600 if (current_function_check_memory_usage)
4601 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4602 XEXP (copy, 0), Pmode,
4603 GEN_INT (int_size_in_bytes (type)),
4604 TYPE_MODE (sizetype),
4605 GEN_INT (MEMORY_USE_RW),
4606 TYPE_MODE (integer_type_node));
4607 conversion_insns = get_insns ();
4608 did_conversion = 1;
4609 end_sequence ();
4610 }
4611 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4612
4613 /* In any case, record the parm's desired stack location
4614 in case we later discover it must live in the stack.
4615
4616 If it is a COMPLEX value, store the stack location for both
4617 halves. */
4618
4619 if (GET_CODE (parmreg) == CONCAT)
4620 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4621 else
4622 regno = REGNO (parmreg);
4623
4624 if (regno >= max_parm_reg)
4625 {
4626 rtx *new;
4627 int old_max_parm_reg = max_parm_reg;
4628
4629 /* It's slow to expand this one register at a time,
4630 but it's also rare and we need max_parm_reg to be
4631 precisely correct. */
4632 max_parm_reg = regno + 1;
4633 new = (rtx *) xrealloc (parm_reg_stack_loc,
4634 max_parm_reg * sizeof (rtx));
4635 bzero ((char *) (new + old_max_parm_reg),
4636 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4637 parm_reg_stack_loc = new;
4638 }
4639
4640 if (GET_CODE (parmreg) == CONCAT)
4641 {
4642 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4643
4644 regnor = REGNO (gen_realpart (submode, parmreg));
4645 regnoi = REGNO (gen_imagpart (submode, parmreg));
4646
4647 if (stack_parm != 0)
4648 {
4649 parm_reg_stack_loc[regnor]
4650 = gen_realpart (submode, stack_parm);
4651 parm_reg_stack_loc[regnoi]
4652 = gen_imagpart (submode, stack_parm);
4653 }
4654 else
4655 {
4656 parm_reg_stack_loc[regnor] = 0;
4657 parm_reg_stack_loc[regnoi] = 0;
4658 }
4659 }
4660 else
4661 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4662
4663 /* Mark the register as eliminable if we did no conversion
4664 and it was copied from memory at a fixed offset,
4665 and the arg pointer was not copied to a pseudo-reg.
4666 If the arg pointer is a pseudo reg or the offset formed
4667 an invalid address, such memory-equivalences
4668 as we make here would screw up life analysis for it. */
4669 if (nominal_mode == passed_mode
4670 && ! did_conversion
4671 && stack_parm != 0
4672 && GET_CODE (stack_parm) == MEM
4673 && stack_offset.var == 0
4674 && reg_mentioned_p (virtual_incoming_args_rtx,
4675 XEXP (stack_parm, 0)))
4676 {
4677 rtx linsn = get_last_insn ();
4678 rtx sinsn, set;
4679
4680 /* Mark complex types separately. */
4681 if (GET_CODE (parmreg) == CONCAT)
4682 /* Scan backwards for the set of the real and
4683 imaginary parts. */
4684 for (sinsn = linsn; sinsn != 0;
4685 sinsn = prev_nonnote_insn (sinsn))
4686 {
4687 set = single_set (sinsn);
4688 if (set != 0
4689 && SET_DEST (set) == regno_reg_rtx [regnoi])
4690 REG_NOTES (sinsn)
4691 = gen_rtx_EXPR_LIST (REG_EQUIV,
4692 parm_reg_stack_loc[regnoi],
4693 REG_NOTES (sinsn));
4694 else if (set != 0
4695 && SET_DEST (set) == regno_reg_rtx [regnor])
4696 REG_NOTES (sinsn)
4697 = gen_rtx_EXPR_LIST (REG_EQUIV,
4698 parm_reg_stack_loc[regnor],
4699 REG_NOTES (sinsn));
4700 }
4701 else if ((set = single_set (linsn)) != 0
4702 && SET_DEST (set) == parmreg)
4703 REG_NOTES (linsn)
4704 = gen_rtx_EXPR_LIST (REG_EQUIV,
4705 stack_parm, REG_NOTES (linsn));
4706 }
4707
4708 /* For pointer data type, suggest pointer register. */
4709 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4710 mark_reg_pointer (parmreg,
4711 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4712 / BITS_PER_UNIT));
4713 }
4714 else
4715 {
4716 /* Value must be stored in the stack slot STACK_PARM
4717 during function execution. */
4718
4719 if (promoted_mode != nominal_mode)
4720 {
4721 /* Conversion is required. */
4722 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4723
4724 emit_move_insn (tempreg, validize_mem (entry_parm));
4725
4726 push_to_sequence (conversion_insns);
4727 entry_parm = convert_to_mode (nominal_mode, tempreg,
4728 TREE_UNSIGNED (TREE_TYPE (parm)));
4729 if (stack_parm)
4730 {
4731 /* ??? This may need a big-endian conversion on sparc64. */
4732 stack_parm = change_address (stack_parm, nominal_mode,
4733 NULL_RTX);
4734 }
4735 conversion_insns = get_insns ();
4736 did_conversion = 1;
4737 end_sequence ();
4738 }
4739
4740 if (entry_parm != stack_parm)
4741 {
4742 if (stack_parm == 0)
4743 {
4744 stack_parm
4745 = assign_stack_local (GET_MODE (entry_parm),
4746 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4747 /* If this is a memory ref that contains aggregate components,
4748 mark it as such for cse and loop optimize. */
4749 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4750 }
4751
4752 if (promoted_mode != nominal_mode)
4753 {
4754 push_to_sequence (conversion_insns);
4755 emit_move_insn (validize_mem (stack_parm),
4756 validize_mem (entry_parm));
4757 conversion_insns = get_insns ();
4758 end_sequence ();
4759 }
4760 else
4761 emit_move_insn (validize_mem (stack_parm),
4762 validize_mem (entry_parm));
4763 }
4764 if (current_function_check_memory_usage)
4765 {
4766 push_to_sequence (conversion_insns);
4767 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4768 XEXP (stack_parm, 0), Pmode,
4769 GEN_INT (GET_MODE_SIZE (GET_MODE
4770 (entry_parm))),
4771 TYPE_MODE (sizetype),
4772 GEN_INT (MEMORY_USE_RW),
4773 TYPE_MODE (integer_type_node));
4774
4775 conversion_insns = get_insns ();
4776 end_sequence ();
4777 }
4778 DECL_RTL (parm) = stack_parm;
4779 }
4780
4781 /* If this "parameter" was the place where we are receiving the
4782 function's incoming structure pointer, set up the result. */
4783 if (parm == function_result_decl)
4784 {
4785 tree result = DECL_RESULT (fndecl);
4786 tree restype = TREE_TYPE (result);
4787
4788 DECL_RTL (result)
4789 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4790
4791 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4792 AGGREGATE_TYPE_P (restype));
4793 }
4794
4795 if (TREE_THIS_VOLATILE (parm))
4796 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4797 if (TREE_READONLY (parm))
4798 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4799 }
4800
4801 /* Output all parameter conversion instructions (possibly including calls)
4802 now that all parameters have been copied out of hard registers. */
4803 emit_insns (conversion_insns);
4804
4805 last_parm_insn = get_last_insn ();
4806
4807 current_function_args_size = stack_args_size.constant;
4808
4809 /* Adjust function incoming argument size for alignment and
4810 minimum length. */
4811
4812 #ifdef REG_PARM_STACK_SPACE
4813 #ifndef MAYBE_REG_PARM_STACK_SPACE
4814 current_function_args_size = MAX (current_function_args_size,
4815 REG_PARM_STACK_SPACE (fndecl));
4816 #endif
4817 #endif
4818
4819 #ifdef STACK_BOUNDARY
4820 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4821
4822 current_function_args_size
4823 = ((current_function_args_size + STACK_BYTES - 1)
4824 / STACK_BYTES) * STACK_BYTES;
4825 #endif
4826
4827 #ifdef ARGS_GROW_DOWNWARD
4828 current_function_arg_offset_rtx
4829 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4830 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4831 size_int (-stack_args_size.constant)),
4832 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4833 #else
4834 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4835 #endif
4836
4837 /* See how many bytes, if any, of its args a function should try to pop
4838 on return. */
4839
4840 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4841 current_function_args_size);
4842
4843 /* For a stdarg.h function, save info about
4844 regs and stack space used by the named args. */
4845
4846 if (!hide_last_arg)
4847 current_function_args_info = args_so_far;
4848
4849 /* Set the rtx used for the function return value. Put this in its
4850 own variable so any optimizers that need this information don't have
4851 to include tree.h. Do this here so it gets done when an inlined
4852 function gets output. */
4853
4854 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4855 }
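/* As a worked example of the args-size rounding above (a sketch,
   assuming STACK_BOUNDARY == 64 and hence STACK_BYTES == 8): an
   accumulated args size of 20 bytes becomes
   ((20 + 8 - 1) / 8) * 8 == 24 bytes.  */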
4856 \f
4857 /* Indicate whether REGNO is an incoming argument to the current function
4858 that was promoted to a wider mode. If so, return the RTX for the
4859 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4860 that REGNO is promoted from and whether the promotion was signed or
4861 unsigned. */
4862
4863 #ifdef PROMOTE_FUNCTION_ARGS
4864
4865 rtx
4866 promoted_input_arg (regno, pmode, punsignedp)
4867 int regno;
4868 enum machine_mode *pmode;
4869 int *punsignedp;
4870 {
4871 tree arg;
4872
4873 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4874 arg = TREE_CHAIN (arg))
4875 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4876 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4877 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4878 {
4879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4880 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4881
4882 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4883 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4884 && mode != DECL_MODE (arg))
4885 {
4886 *pmode = DECL_MODE (arg);
4887 *punsignedp = unsignedp;
4888 return DECL_INCOMING_RTL (arg);
4889 }
4890 }
4891
4892 return 0;
4893 }
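/* Example query (a sketch): ask whether hard register REGNO carries
   an incoming argument promoted to a wider mode, recovering the
   original mode and signedness when it does:

	enum machine_mode mode;
	int unsignedp;
	rtx reg = promoted_input_arg (regno, &mode, &unsignedp);  */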
4894
4895 #endif
4896 \f
4897 /* Compute the size and offset from the start of the stacked arguments for a
4898 parm passed in mode PASSED_MODE and with type TYPE.
4899
4900 INITIAL_OFFSET_PTR points to the current offset into the stacked
4901 arguments.
4902
4903 The starting offset and size for this parm are returned in *OFFSET_PTR
4904 and *ARG_SIZE_PTR, respectively.
4905
4906 IN_REGS is non-zero if the argument will be passed in registers. It will
4907 never be set if REG_PARM_STACK_SPACE is not defined.
4908
4909 FNDECL is the function in which the argument was defined.
4910
4911 There are two types of rounding that are done. The first, controlled by
4912 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4913 list to be aligned to the specified boundary (in bits). This rounding
4914 affects the initial and starting offsets, but not the argument size.
4915
4916 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4917 optionally rounds the size of the parm to PARM_BOUNDARY. The
4918 initial offset is not affected by this rounding, while the size always
4919 is and the starting offset may be. */
4920
4921 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4922 initial_offset_ptr is positive because locate_and_pad_parm's
4923 callers pass in the total size of args so far as
4924 initial_offset_ptr. arg_size_ptr is always positive. */
4925
4926 void
4927 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4928 initial_offset_ptr, offset_ptr, arg_size_ptr,
4929 alignment_pad)
4930 enum machine_mode passed_mode;
4931 tree type;
4932 int in_regs ATTRIBUTE_UNUSED;
4933 tree fndecl ATTRIBUTE_UNUSED;
4934 struct args_size *initial_offset_ptr;
4935 struct args_size *offset_ptr;
4936 struct args_size *arg_size_ptr;
4937 struct args_size *alignment_pad;
4938
4939 {
4940 tree sizetree
4941 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4942 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4943 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4944
4945 #ifdef REG_PARM_STACK_SPACE
4946 /* If we have found a stack parm before we reach the end of the
4947 area reserved for registers, skip that area. */
4948 if (! in_regs)
4949 {
4950 int reg_parm_stack_space = 0;
4951
4952 #ifdef MAYBE_REG_PARM_STACK_SPACE
4953 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4954 #else
4955 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4956 #endif
4957 if (reg_parm_stack_space > 0)
4958 {
4959 if (initial_offset_ptr->var)
4960 {
4961 initial_offset_ptr->var
4962 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4963 size_int (reg_parm_stack_space));
4964 initial_offset_ptr->constant = 0;
4965 }
4966 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4967 initial_offset_ptr->constant = reg_parm_stack_space;
4968 }
4969 }
4970 #endif /* REG_PARM_STACK_SPACE */
4971
4972 arg_size_ptr->var = 0;
4973 arg_size_ptr->constant = 0;
4974
4975 #ifdef ARGS_GROW_DOWNWARD
4976 if (initial_offset_ptr->var)
4977 {
4978 offset_ptr->constant = 0;
4979 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4980 initial_offset_ptr->var);
4981 }
4982 else
4983 {
4984 offset_ptr->constant = - initial_offset_ptr->constant;
4985 offset_ptr->var = 0;
4986 }
4987 if (where_pad != none
4988 && (TREE_CODE (sizetree) != INTEGER_CST
4989 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4990 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4991 SUB_PARM_SIZE (*offset_ptr, sizetree);
4992 if (where_pad != downward)
4993 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
4994 if (initial_offset_ptr->var)
4995 {
4996 arg_size_ptr->var = size_binop (MINUS_EXPR,
4997 size_binop (MINUS_EXPR,
4998 integer_zero_node,
4999 initial_offset_ptr->var),
5000 offset_ptr->var);
5001 }
5002 else
5003 {
5004 arg_size_ptr->constant = (- initial_offset_ptr->constant
5005 - offset_ptr->constant);
5006 }
5007 #else /* !ARGS_GROW_DOWNWARD */
5008 pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
5009 *offset_ptr = *initial_offset_ptr;
5010
5011 #ifdef PUSH_ROUNDING
5012 if (passed_mode != BLKmode)
5013 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
5014 #endif
5015
5016 /* Pad_below needs the pre-rounded size to know how much to pad below
5017 so this must be done before rounding up. */
5018 if (where_pad == downward
5019 /* However, BLKmode args passed in regs have their padding done elsewhere.
5020 The stack slot must be able to hold the entire register. */
5021 && !(in_regs && passed_mode == BLKmode))
5022 pad_below (offset_ptr, passed_mode, sizetree);
5023
5024 if (where_pad != none
5025 && (TREE_CODE (sizetree) != INTEGER_CST
5026 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
5027 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5028
5029 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
5030 #endif /* ARGS_GROW_DOWNWARD */
5031 }
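/* Worked example for the routine above (a sketch, assuming args grow
   upward, upward padding, PARM_BOUNDARY == 32 and a 64-bit
   FUNCTION_ARG_BOUNDARY for this argument): an initial offset of 4
   bytes is first padded to 8 by pad_to_arg_alignment, and a 6-byte
   argument is then rounded up to 8 bytes, so this parm lands at
   offset 8 and the next argument starts at offset 16.  */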
5032
5033 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5034 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
5035
5036 static void
5037 pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
5038 struct args_size *offset_ptr;
5039 int boundary;
5040 struct args_size *alignment_pad;
5041 {
5042 tree save_var = NULL_TREE;
5043 HOST_WIDE_INT save_constant = 0;
5044
5045 int boundary_in_bytes = boundary / BITS_PER_UNIT;
5046
5047 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5048 {
5049 save_var = offset_ptr->var;
5050 save_constant = offset_ptr->constant;
5051 }
5052
5053 alignment_pad->var = NULL_TREE;
5054 alignment_pad->constant = 0;
5055
5056 if (boundary > BITS_PER_UNIT)
5057 {
5058 if (offset_ptr->var)
5059 {
5060 offset_ptr->var =
5061 #ifdef ARGS_GROW_DOWNWARD
5062 round_down
5063 #else
5064 round_up
5065 #endif
5066 (ARGS_SIZE_TREE (*offset_ptr),
5067 boundary / BITS_PER_UNIT);
5068 offset_ptr->constant = 0; /*?*/
5069 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5070 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, save_var);
5071 }
5072 else
5073 {
5074 offset_ptr->constant =
5075 #ifdef ARGS_GROW_DOWNWARD
5076 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
5077 #else
5078 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
5079 #endif
5080 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
5081 alignment_pad->constant = offset_ptr->constant - save_constant;
5082 }
5083 }
5084 }
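/* For instance (a sketch, with a 64-bit boundary so boundary_in_bytes
   == 8): a constant offset of 20 becomes CEIL_ROUND (20, 8) == 24
   when args grow upward, while with ARGS_GROW_DOWNWARD an offset of
   -20 becomes FLOOR_ROUND (-20, 8) == -24.  */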
5085
5086 #ifndef ARGS_GROW_DOWNWARD
5087 static void
5088 pad_below (offset_ptr, passed_mode, sizetree)
5089 struct args_size *offset_ptr;
5090 enum machine_mode passed_mode;
5091 tree sizetree;
5092 {
5093 if (passed_mode != BLKmode)
5094 {
5095 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5096 offset_ptr->constant
5097 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5098 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5099 - GET_MODE_SIZE (passed_mode));
5100 }
5101 else
5102 {
5103 if (TREE_CODE (sizetree) != INTEGER_CST
5104 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5105 {
5106 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
5107 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5108 /* Add it in. */
5109 ADD_PARM_SIZE (*offset_ptr, s2);
5110 SUB_PARM_SIZE (*offset_ptr, sizetree);
5111 }
5112 }
5113 }
5114 #endif
5115
5116 #ifdef ARGS_GROW_DOWNWARD
5117 static tree
5118 round_down (value, divisor)
5119 tree value;
5120 int divisor;
5121 {
5122 return size_binop (MULT_EXPR,
5123 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
5124 size_int (divisor));
5125 }
5126 #endif
5127 \f
5128 /* Walk the tree of blocks describing the binding levels within a function
5129 and warn about uninitialized variables.
5130 This is done after calling flow_analysis and before global_alloc
5131 clobbers the pseudo-regs to hard regs. */
5132
5133 void
5134 uninitialized_vars_warning (block)
5135 tree block;
5136 {
5137 register tree decl, sub;
5138 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5139 {
5140 if (warn_uninitialized
5141 && TREE_CODE (decl) == VAR_DECL
5142 /* These warnings are unreliable for aggregates
5143 because assigning the fields one by one can fail to convince
5144 flow.c that the entire aggregate was initialized.
5145 Unions are troublesome because members may be shorter. */
5146 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5147 && DECL_RTL (decl) != 0
5148 && GET_CODE (DECL_RTL (decl)) == REG
5149 /* Global optimizations can make it difficult to determine if a
5150 particular variable has been initialized. However, a VAR_DECL
5151 with a nonzero DECL_INITIAL had an initializer, so do not
5152 claim it is potentially uninitialized.
5153
5154 We do not care about the actual value in DECL_INITIAL, so we do
5155 not worry that it may be a dangling pointer. */
5156 && DECL_INITIAL (decl) == NULL_TREE
5157 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5158 warning_with_decl (decl,
5159 "`%s' might be used uninitialized in this function");
5160 if (extra_warnings
5161 && TREE_CODE (decl) == VAR_DECL
5162 && DECL_RTL (decl) != 0
5163 && GET_CODE (DECL_RTL (decl)) == REG
5164 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5165 warning_with_decl (decl,
5166 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5167 }
5168 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5169 uninitialized_vars_warning (sub);
5170 }
5171
5172 /* Do the appropriate part of uninitialized_vars_warning
5173 but for arguments instead of local variables. */
5174
5175 void
5176 setjmp_args_warning ()
5177 {
5178 register tree decl;
5179 for (decl = DECL_ARGUMENTS (current_function_decl);
5180 decl; decl = TREE_CHAIN (decl))
5181 if (DECL_RTL (decl) != 0
5182 && GET_CODE (DECL_RTL (decl)) == REG
5183 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5184 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5185 }
5186
5187 /* If this function calls setjmp, put all vars into the stack
5188 unless they were declared `register'. */
5189
5190 void
5191 setjmp_protect (block)
5192 tree block;
5193 {
5194 register tree decl, sub;
5195 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5196 if ((TREE_CODE (decl) == VAR_DECL
5197 || TREE_CODE (decl) == PARM_DECL)
5198 && DECL_RTL (decl) != 0
5199 && (GET_CODE (DECL_RTL (decl)) == REG
5200 || (GET_CODE (DECL_RTL (decl)) == MEM
5201 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5202 /* If this variable came from an inline function, it must be
5203 that its life doesn't overlap the setjmp. If there was a
5204 setjmp in the function, it would already be in memory. We
5205 	   must exclude such variables because their DECL_RTL might be
5206 set to strange things such as virtual_stack_vars_rtx. */
5207 && ! DECL_FROM_INLINE (decl)
5208 && (
5209 #ifdef NON_SAVING_SETJMP
5210 /* If longjmp doesn't restore the registers,
5211 don't put anything in them. */
5212 NON_SAVING_SETJMP
5213 ||
5214 #endif
5215 ! DECL_REGISTER (decl)))
5216 put_var_into_stack (decl);
5217 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5218 setjmp_protect (sub);
5219 }
5220 \f
5221 /* Like the previous function, but for args instead of local variables. */
5222
5223 void
5224 setjmp_protect_args ()
5225 {
5226 register tree decl;
5227 for (decl = DECL_ARGUMENTS (current_function_decl);
5228 decl; decl = TREE_CHAIN (decl))
5229 if ((TREE_CODE (decl) == VAR_DECL
5230 || TREE_CODE (decl) == PARM_DECL)
5231 && DECL_RTL (decl) != 0
5232 && (GET_CODE (DECL_RTL (decl)) == REG
5233 || (GET_CODE (DECL_RTL (decl)) == MEM
5234 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5235 && (
5236 /* If longjmp doesn't restore the registers,
5237 don't put anything in them. */
5238 #ifdef NON_SAVING_SETJMP
5239 NON_SAVING_SETJMP
5240 ||
5241 #endif
5242 ! DECL_REGISTER (decl)))
5243 put_var_into_stack (decl);
5244 }
5245 \f
5246 /* Return the context-pointer register corresponding to DECL,
5247 or 0 if it does not need one. */
5248
5249 rtx
5250 lookup_static_chain (decl)
5251 tree decl;
5252 {
5253 tree context = decl_function_context (decl);
5254 tree link;
5255
5256 if (context == 0
5257 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5258 return 0;
5259
5260 /* We treat inline_function_decl as an alias for the current function
5261 because that is the inline function whose vars, types, etc.
5262 are being merged into the current function.
5263 See expand_inline_function. */
5264 if (context == current_function_decl || context == inline_function_decl)
5265 return virtual_stack_vars_rtx;
5266
5267 for (link = context_display; link; link = TREE_CHAIN (link))
5268 if (TREE_PURPOSE (link) == context)
5269 return RTL_EXPR_RTL (TREE_VALUE (link));
5270
5271 abort ();
5272 }
5273 \f
5274 /* Convert a stack slot address ADDR for variable VAR
5275 (from a containing function)
5276 into an address valid in this function (using a static chain). */
5277
5278 rtx
5279 fix_lexical_addr (addr, var)
5280 rtx addr;
5281 tree var;
5282 {
5283 rtx basereg;
5284 HOST_WIDE_INT displacement;
5285 tree context = decl_function_context (var);
5286 struct function *fp;
5287 rtx base = 0;
5288
5289 /* If this is the present function, we need not do anything. */
5290 if (context == current_function_decl || context == inline_function_decl)
5291 return addr;
5292
5293 for (fp = outer_function_chain; fp; fp = fp->next)
5294 if (fp->decl == context)
5295 break;
5296
5297 if (fp == 0)
5298 abort ();
5299
5300 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5301 addr = XEXP (XEXP (addr, 0), 0);
5302
5303 /* Decode given address as base reg plus displacement. */
5304 if (GET_CODE (addr) == REG)
5305 basereg = addr, displacement = 0;
5306 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5307 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5308 else
5309 abort ();
5310
5311 /* We accept vars reached via the containing function's
5312 incoming arg pointer and via its stack variables pointer. */
5313 if (basereg == fp->internal_arg_pointer)
5314 {
5315 /* If reached via arg pointer, get the arg pointer value
5316 out of that function's stack frame.
5317
5318 There are two cases: If a separate ap is needed, allocate a
5319 slot in the outer function for it and dereference it that way.
5320 This is correct even if the real ap is actually a pseudo.
5321 Otherwise, just adjust the offset from the frame pointer to
5322 compensate. */
5323
5324 #ifdef NEED_SEPARATE_AP
5325 rtx addr;
5326
5327 if (fp->x_arg_pointer_save_area == 0)
5328 fp->x_arg_pointer_save_area
5329 = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5330
5331 addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
5332 addr = memory_address (Pmode, addr);
5333
5334 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5335 #else
5336 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5337 base = lookup_static_chain (var);
5338 #endif
5339 }
5340
5341 else if (basereg == virtual_stack_vars_rtx)
5342 {
5343 /* This is the same code as lookup_static_chain, duplicated here to
5344 avoid an extra call to decl_function_context. */
5345 tree link;
5346
5347 for (link = context_display; link; link = TREE_CHAIN (link))
5348 if (TREE_PURPOSE (link) == context)
5349 {
5350 base = RTL_EXPR_RTL (TREE_VALUE (link));
5351 break;
5352 }
5353 }
5354
5355 if (base == 0)
5356 abort ();
5357
5358 /* Use same offset, relative to appropriate static chain or argument
5359 pointer. */
5360 return plus_constant (base, displacement);
5361 }
5362 \f
5363 /* Return the address of the trampoline for entering nested fn FUNCTION.
5364 If necessary, allocate a trampoline (in the stack frame)
5365 and emit rtl to initialize its contents (at entry to this function). */
5366
5367 rtx
5368 trampoline_address (function)
5369 tree function;
5370 {
5371 tree link;
5372 tree rtlexp;
5373 rtx tramp;
5374 struct function *fp;
5375 tree fn_context;
5376
5377 /* Find an existing trampoline and return it. */
5378 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5379 if (TREE_PURPOSE (link) == function)
5380 return
5381 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5382
5383 for (fp = outer_function_chain; fp; fp = fp->next)
5384 for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
5385 if (TREE_PURPOSE (link) == function)
5386 {
5387 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5388 function);
5389 return round_trampoline_addr (tramp);
5390 }
5391
5392 /* None exists; we must make one. */
5393
5394 /* Find the `struct function' for the function containing FUNCTION. */
5395 fp = 0;
5396 fn_context = decl_function_context (function);
5397 if (fn_context != current_function_decl
5398 && fn_context != inline_function_decl)
5399 for (fp = outer_function_chain; fp; fp = fp->next)
5400 if (fp->decl == fn_context)
5401 break;
5402
5403 /* Allocate run-time space for this trampoline
5404 (usually in the defining function's stack frame). */
5405 #ifdef ALLOCATE_TRAMPOLINE
5406 tramp = ALLOCATE_TRAMPOLINE (fp);
5407 #else
5408   /* If rounding is needed, allocate extra space
5409 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5410 #ifdef TRAMPOLINE_ALIGNMENT
5411 #define TRAMPOLINE_REAL_SIZE \
5412 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5413 #else
5414 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5415 #endif
5416 tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
5417 fp ? fp : cfun);
5418 #endif
5419
5420 /* Record the trampoline for reuse and note it for later initialization
5421 by expand_function_end. */
5422 if (fp != 0)
5423 {
5424 push_obstacks (fp->function_maybepermanent_obstack,
5425 fp->function_maybepermanent_obstack);
5426 rtlexp = make_node (RTL_EXPR);
5427 RTL_EXPR_RTL (rtlexp) = tramp;
5428 fp->x_trampoline_list = tree_cons (function, rtlexp,
5429 fp->x_trampoline_list);
5430 pop_obstacks ();
5431 }
5432 else
5433 {
5434 /* Make the RTL_EXPR node temporary, not momentary, so that the
5435 trampoline_list doesn't become garbage. */
5436 int momentary = suspend_momentary ();
5437 rtlexp = make_node (RTL_EXPR);
5438 resume_momentary (momentary);
5439
5440 RTL_EXPR_RTL (rtlexp) = tramp;
5441 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5442 }
5443
5444 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5445 return round_trampoline_addr (tramp);
5446 }
5447
5448 /* Given a trampoline address,
5449    round it to a multiple of TRAMPOLINE_ALIGNMENT.  */
5450
5451 static rtx
5452 round_trampoline_addr (tramp)
5453 rtx tramp;
5454 {
5455 #ifdef TRAMPOLINE_ALIGNMENT
5456 /* Round address up to desired boundary. */
5457 rtx temp = gen_reg_rtx (Pmode);
5458 temp = expand_binop (Pmode, add_optab, tramp,
5459 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5460 temp, 0, OPTAB_LIB_WIDEN);
5461 tramp = expand_binop (Pmode, and_optab, temp,
5462 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5463 temp, 0, OPTAB_LIB_WIDEN);
5464 #endif
5465 return tramp;
5466 }
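/* Illustrative sketch (not from the original source): the add-and-mask
   rounding that the two expand_binop calls above compute, shown on a plain
   host integer.  The helper name and the 4-byte alignment are assumptions
   for the example only.  */
#if 0
static unsigned long
round_trampoline_addr_example (unsigned long addr)
{
  unsigned long align = 4;	/* TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT */

  /* E.g. 0x1001 -> 0x1004, while 0x1004 is already aligned.  */
  return (addr + align - 1) & -align;
}
#endif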
5467 \f
5468 /* The functions identify_blocks and reorder_blocks provide a way to
5469 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5470 duplicate portions of the RTL code. Call identify_blocks before
5471 changing the RTL, and call reorder_blocks after. */
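/* A hypothetical outline (not from the original source) of that calling
   order, assuming the usual case where the top-level BLOCK chain lives in
   DECL_INITIAL of the current function; the step in the middle stands for
   whatever RTL rewriting the pass performs.  */
#if 0
  identify_blocks (DECL_INITIAL (current_function_decl), get_insns ());

  /* ... reshuffle or duplicate insns here ... */

  DECL_INITIAL (current_function_decl)
    = reorder_blocks (DECL_INITIAL (current_function_decl), get_insns ());
#endif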
5472
5473 /* Put all this function's BLOCK nodes, including those that are chained
5474    onto the first block, into a vector in depth-first order.
5475    Also store in each NOTE for the beginning or end of a block
5476    the BLOCK that the note corresponds to.
5477    The arguments are BLOCK, the chain of top-level blocks of the function,
5478    and INSNS, the insn chain of the function.  */
5479
5480 void
5481 identify_blocks (block, insns)
5482 tree block;
5483 rtx insns;
5484 {
5485 int n_blocks;
5486 tree *block_vector;
5487 tree *block_stack;
5488 int depth = 0;
5489 int current_block_number = 1;
5490 rtx insn;
5491
5492 if (block == 0)
5493 return;
5494
5495 /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
5496 depth-first order. */
5497 n_blocks = all_blocks (block, 0);
5498 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5499 all_blocks (block, block_vector);
5500
5501 block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
5502
5503 for (insn = insns; insn; insn = NEXT_INSN (insn))
5504 if (GET_CODE (insn) == NOTE)
5505 {
5506 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5507 {
5508 tree b;
5509
5510 /* If there are more block notes than BLOCKs, something
5511 is badly wrong. */
5512 if (current_block_number == n_blocks)
5513 abort ();
5514
5515 b = block_vector[current_block_number++];
5516 NOTE_BLOCK (insn) = b;
5517 block_stack[depth++] = b;
5518 }
5519 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5520 {
5521 if (depth == 0)
5522 	      /* There are more NOTE_INSN_BLOCK_ENDs than
5523 NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
5524 abort ();
5525
5526 NOTE_BLOCK (insn) = block_stack[--depth];
5527 }
5528 }
5529
5530 /* In whole-function mode, we might not have seen the whole function
5531 yet, so we might not use up all the blocks. */
5532 if (n_blocks != current_block_number
5533 && !cfun->x_whole_function_mode_p)
5534 abort ();
5535
5536 free (block_vector);
5537 free (block_stack);
5538 }
5539
5540 /* Given a revised instruction chain, rebuild the tree structure of
5541 BLOCK nodes to correspond to the new order of RTL. The new block
5542    tree is inserted below BLOCK.  Returns the current top-level
5543 block. */
5544
5545 tree
5546 reorder_blocks (block, insns)
5547 tree block;
5548 rtx insns;
5549 {
5550 tree current_block = block;
5551 rtx insn;
5552
5553 if (block == NULL_TREE)
5554 return NULL_TREE;
5555
5556   /* Prune the old trees away, so that they don't get in the way.  */
5557 BLOCK_SUBBLOCKS (current_block) = 0;
5558 BLOCK_CHAIN (current_block) = 0;
5559
5560 for (insn = insns; insn; insn = NEXT_INSN (insn))
5561 if (GET_CODE (insn) == NOTE)
5562 {
5563 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5564 {
5565 tree block = NOTE_BLOCK (insn);
5566 /* If we have seen this block before, copy it. */
5567 if (TREE_ASM_WRITTEN (block))
5568 block = copy_node (block);
5569 BLOCK_SUBBLOCKS (block) = 0;
5570 TREE_ASM_WRITTEN (block) = 1;
5571 BLOCK_SUPERCONTEXT (block) = current_block;
5572 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5573 BLOCK_SUBBLOCKS (current_block) = block;
5574 current_block = block;
5575 }
5576 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5577 {
5578 BLOCK_SUBBLOCKS (current_block)
5579 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5580 current_block = BLOCK_SUPERCONTEXT (current_block);
5581 }
5582 }
5583
5584 BLOCK_SUBBLOCKS (current_block)
5585 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5586 return current_block;
5587 }
5588
5589 /* Reverse the order of elements in the chain T of blocks,
5590 and return the new head of the chain (old last element). */
5591
5592 static tree
5593 blocks_nreverse (t)
5594 tree t;
5595 {
5596 register tree prev = 0, decl, next;
5597 for (decl = t; decl; decl = next)
5598 {
5599 next = BLOCK_CHAIN (decl);
5600 BLOCK_CHAIN (decl) = prev;
5601 prev = decl;
5602 }
5603 return prev;
5604 }
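/* A minimal sketch (not part of the original source) of the same in-place
   pointer reversal on an ordinary singly linked list; the struct and
   function names are assumptions for illustration.  */
#if 0
struct node { struct node *next; };

static struct node *
nreverse_example (struct node *list)
{
  struct node *prev = 0, *next;

  for (; list; list = next)
    {
      next = list->next;	/* Save the rest of the chain.  */
      list->next = prev;	/* Point this node back at the previous one.  */
      prev = list;
    }
  return prev;			/* The old tail is the new head.  */
}
#endif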
5605
5606 /* Count the subblocks of the list starting with BLOCK, and list them
5607 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5608 blocks. */
5609
5610 static int
5611 all_blocks (block, vector)
5612 tree block;
5613 tree *vector;
5614 {
5615 int n_blocks = 0;
5616
5617 while (block)
5618 {
5619 TREE_ASM_WRITTEN (block) = 0;
5620
5621 /* Record this block. */
5622 if (vector)
5623 vector[n_blocks] = block;
5624
5625 ++n_blocks;
5626
5627 /* Record the subblocks, and their subblocks... */
5628 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5629 vector ? vector + n_blocks : 0);
5630 block = BLOCK_CHAIN (block);
5631 }
5632
5633 return n_blocks;
5634 }
5635 \f
5636 /* Allocate a function structure and reset its contents to the defaults. */
5637 static void
5638 prepare_function_start ()
5639 {
5640 cfun = (struct function *) xcalloc (1, sizeof (struct function));
5641
5642 init_stmt_for_function ();
5643 init_eh_for_function ();
5644
5645 cse_not_expected = ! optimize;
5646
5647 /* Caller save not needed yet. */
5648 caller_save_needed = 0;
5649
5650 /* No stack slots have been made yet. */
5651 stack_slot_list = 0;
5652
5653 current_function_has_nonlocal_label = 0;
5654 current_function_has_nonlocal_goto = 0;
5655
5656 /* There is no stack slot for handling nonlocal gotos. */
5657 nonlocal_goto_handler_slots = 0;
5658 nonlocal_goto_stack_level = 0;
5659
5660 /* No labels have been declared for nonlocal use. */
5661 nonlocal_labels = 0;
5662 nonlocal_goto_handler_labels = 0;
5663
5664 /* No function calls so far in this function. */
5665 function_call_count = 0;
5666
5667 /* No parm regs have been allocated.
5668 (This is important for output_inline_function.) */
5669 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5670
5671 /* Initialize the RTL mechanism. */
5672 init_emit ();
5673
5674 /* Initialize the queue of pending postincrement and postdecrements,
5675 and some other info in expr.c. */
5676 init_expr ();
5677
5678 /* We haven't done register allocation yet. */
5679 reg_renumber = 0;
5680
5681 init_varasm_status (cfun);
5682
5683 /* Clear out data used for inlining. */
5684 cfun->inlinable = 0;
5685 cfun->original_decl_initial = 0;
5686 cfun->original_arg_vector = 0;
5687
5688 cfun->stack_alignment_needed = 0;
5689 #ifdef STACK_BOUNDARY
5690 cfun->preferred_stack_boundary = STACK_BOUNDARY;
5691 #endif
5692
5693 /* Set if a call to setjmp is seen. */
5694 current_function_calls_setjmp = 0;
5695
5696 /* Set if a call to longjmp is seen. */
5697 current_function_calls_longjmp = 0;
5698
5699 current_function_calls_alloca = 0;
5700 current_function_contains_functions = 0;
5701 current_function_is_leaf = 0;
5702 current_function_sp_is_unchanging = 0;
5703 current_function_uses_only_leaf_regs = 0;
5704 current_function_has_computed_jump = 0;
5705 current_function_is_thunk = 0;
5706
5707 current_function_returns_pcc_struct = 0;
5708 current_function_returns_struct = 0;
5709 current_function_epilogue_delay_list = 0;
5710 current_function_uses_const_pool = 0;
5711 current_function_uses_pic_offset_table = 0;
5712 current_function_cannot_inline = 0;
5713
5714 /* We have not yet needed to make a label to jump to for tail-recursion. */
5715 tail_recursion_label = 0;
5716
5717 /* We haven't had a need to make a save area for ap yet. */
5718 arg_pointer_save_area = 0;
5719
5720 /* No stack slots allocated yet. */
5721 frame_offset = 0;
5722
5723 /* No SAVE_EXPRs in this function yet. */
5724 save_expr_regs = 0;
5725
5726 /* No RTL_EXPRs in this function yet. */
5727 rtl_expr_chain = 0;
5728
5729 /* Set up to allocate temporaries. */
5730 init_temp_slots ();
5731
5732 /* Indicate that we need to distinguish between the return value of the
5733 present function and the return value of a function being called. */
5734 rtx_equal_function_value_matters = 1;
5735
5736 /* Indicate that we have not instantiated virtual registers yet. */
5737 virtuals_instantiated = 0;
5738
5739 /* Indicate we have no need of a frame pointer yet. */
5740 frame_pointer_needed = 0;
5741
5742 /* By default assume not varargs or stdarg. */
5743 current_function_varargs = 0;
5744 current_function_stdarg = 0;
5745
5746 /* We haven't made any trampolines for this function yet. */
5747 trampoline_list = 0;
5748
5749 init_pending_stack_adjust ();
5750 inhibit_defer_pop = 0;
5751
5752 current_function_outgoing_args_size = 0;
5753
5754 if (init_lang_status)
5755 (*init_lang_status) (cfun);
5756 if (init_machine_status)
5757 (*init_machine_status) (cfun);
5758 }
5759
5760 /* Initialize the rtl expansion mechanism so that we can do simple things
5761 like generate sequences. This is used to provide a context during global
5762 initialization of some passes. */
5763 void
5764 init_dummy_function_start ()
5765 {
5766 prepare_function_start ();
5767 }
5768
5769 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5770 and initialize static variables for generating RTL for the statements
5771 of the function. */
5772
5773 void
5774 init_function_start (subr, filename, line)
5775 tree subr;
5776 char *filename;
5777 int line;
5778 {
5779 prepare_function_start ();
5780
5781 /* Remember this function for later. */
5782 cfun->next_global = all_functions;
5783 all_functions = cfun;
5784
5785 current_function_name = (*decl_printable_name) (subr, 2);
5786 cfun->decl = subr;
5787
5788 /* Nonzero if this is a nested function that uses a static chain. */
5789
5790 current_function_needs_context
5791 = (decl_function_context (current_function_decl) != 0
5792 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5793
5794   /* Within function body, compute a type's size as soon as it is laid out.  */
5795 immediate_size_expand++;
5796
5797 /* Prevent ever trying to delete the first instruction of a function.
5798 Also tell final how to output a linenum before the function prologue.
5799 Note linenums could be missing, e.g. when compiling a Java .class file. */
5800 if (line > 0)
5801 emit_line_note (filename, line);
5802
5803 /* Make sure first insn is a note even if we don't want linenums.
5804 This makes sure the first insn will never be deleted.
5805 Also, final expects a note to appear there. */
5806 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5807
5808 /* Set flags used by final.c. */
5809 if (aggregate_value_p (DECL_RESULT (subr)))
5810 {
5811 #ifdef PCC_STATIC_STRUCT_RETURN
5812 current_function_returns_pcc_struct = 1;
5813 #endif
5814 current_function_returns_struct = 1;
5815 }
5816
5817 /* Warn if this value is an aggregate type,
5818 regardless of which calling convention we are using for it. */
5819 if (warn_aggregate_return
5820 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5821 warning ("function returns an aggregate");
5822
5823 current_function_returns_pointer
5824 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5825 }
5826
5827 /* Make sure all values used by the optimization passes have sane
5828 defaults. */
5829 void
5830 init_function_for_compilation ()
5831 {
5832 reg_renumber = 0;
5833 /* No prologue/epilogue insns yet. */
5834 prologue = epilogue = 0;
5835 }
5836
5837 /* Indicate that the current function uses extra args
5838 not explicitly mentioned in the argument list in any fashion. */
5839
5840 void
5841 mark_varargs ()
5842 {
5843 current_function_varargs = 1;
5844 }
5845
5846 /* Expand a call to __main at the beginning of a possible main function. */
5847
5848 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5849 #undef HAS_INIT_SECTION
5850 #define HAS_INIT_SECTION
5851 #endif
5852
5853 void
5854 expand_main_function ()
5855 {
5856 #if !defined (HAS_INIT_SECTION)
5857 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5858 VOIDmode, 0);
5859 #endif /* not HAS_INIT_SECTION */
5860 }
5861 \f
5862 extern struct obstack permanent_obstack;
5863
5864 /* Start the RTL for a new function, and set variables used for
5865 emitting RTL.
5866 SUBR is the FUNCTION_DECL node.
5867 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5868 the function's parameters, which must be run at any return statement. */
5869
5870 void
5871 expand_function_start (subr, parms_have_cleanups)
5872 tree subr;
5873 int parms_have_cleanups;
5874 {
5875 tree tem;
5876 rtx last_ptr = NULL_RTX;
5877
5878 /* Make sure volatile mem refs aren't considered
5879 valid operands of arithmetic insns. */
5880 init_recog_no_volatile ();
5881
5882 /* Set this before generating any memory accesses. */
5883 current_function_check_memory_usage
5884 = (flag_check_memory_usage
5885 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5886
5887 current_function_instrument_entry_exit
5888 = (flag_instrument_function_entry_exit
5889 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5890
5891 current_function_limit_stack
5892 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5893
5894 /* If function gets a static chain arg, store it in the stack frame.
5895 Do this first, so it gets the first stack slot offset. */
5896 if (current_function_needs_context)
5897 {
5898 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5899
5900 /* Delay copying static chain if it is not a register to avoid
5901 conflicts with regs used for parameters. */
5902 if (! SMALL_REGISTER_CLASSES
5903 || GET_CODE (static_chain_incoming_rtx) == REG)
5904 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5905 }
5906
5907 /* If the parameters of this function need cleaning up, get a label
5908 for the beginning of the code which executes those cleanups. This must
5909 be done before doing anything with return_label. */
5910 if (parms_have_cleanups)
5911 cleanup_label = gen_label_rtx ();
5912 else
5913 cleanup_label = 0;
5914
5915 /* Make the label for return statements to jump to, if this machine
5916 does not have a one-instruction return and uses an epilogue,
5917 or if it returns a structure, or if it has parm cleanups. */
5918 #ifdef HAVE_return
5919 if (cleanup_label == 0 && HAVE_return
5920 && ! current_function_instrument_entry_exit
5921 && ! current_function_returns_pcc_struct
5922 && ! (current_function_returns_struct && ! optimize))
5923 return_label = 0;
5924 else
5925 return_label = gen_label_rtx ();
5926 #else
5927 return_label = gen_label_rtx ();
5928 #endif
5929
5930 /* Initialize rtx used to return the value. */
5931 /* Do this before assign_parms so that we copy the struct value address
5932 before any library calls that assign parms might generate. */
5933
5934 /* Decide whether to return the value in memory or in a register. */
5935 if (aggregate_value_p (DECL_RESULT (subr)))
5936 {
5937 /* Returning something that won't go in a register. */
5938 register rtx value_address = 0;
5939
5940 #ifdef PCC_STATIC_STRUCT_RETURN
5941 if (current_function_returns_pcc_struct)
5942 {
5943 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5944 value_address = assemble_static_space (size);
5945 }
5946 else
5947 #endif
5948 {
5949 /* Expect to be passed the address of a place to store the value.
5950 If it is passed as an argument, assign_parms will take care of
5951 it. */
5952 if (struct_value_incoming_rtx)
5953 {
5954 value_address = gen_reg_rtx (Pmode);
5955 emit_move_insn (value_address, struct_value_incoming_rtx);
5956 }
5957 }
5958 if (value_address)
5959 {
5960 DECL_RTL (DECL_RESULT (subr))
5961 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5962 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5963 AGGREGATE_TYPE_P (TREE_TYPE
5964 (DECL_RESULT
5965 (subr))));
5966 }
5967 }
5968 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5969 /* If return mode is void, this decl rtl should not be used. */
5970 DECL_RTL (DECL_RESULT (subr)) = 0;
5971 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5972 {
5973 /* If function will end with cleanup code for parms,
5974 compute the return values into a pseudo reg,
5975 which we will copy into the true return register
5976 after the cleanups are done. */
5977
5978 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5979
5980 #ifdef PROMOTE_FUNCTION_RETURN
5981 tree type = TREE_TYPE (DECL_RESULT (subr));
5982 int unsignedp = TREE_UNSIGNED (type);
5983
5984 mode = promote_mode (type, mode, &unsignedp, 1);
5985 #endif
5986
5987 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5988 }
5989 else
5990 /* Scalar, returned in a register. */
5991 {
5992 #ifdef FUNCTION_OUTGOING_VALUE
5993 DECL_RTL (DECL_RESULT (subr))
5994 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5995 #else
5996 DECL_RTL (DECL_RESULT (subr))
5997 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5998 #endif
5999
6000 /* Mark this reg as the function's return value. */
6001 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
6002 {
6003 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
6004 /* Needed because we may need to move this to memory
6005 in case it's a named return value whose address is taken. */
6006 DECL_REGISTER (DECL_RESULT (subr)) = 1;
6007 }
6008 }
6009
6010 /* Initialize rtx for parameters and local variables.
6011 In some cases this requires emitting insns. */
6012
6013 assign_parms (subr);
6014
6015 /* Copy the static chain now if it wasn't a register. The delay is to
6016 avoid conflicts with the parameter passing registers. */
6017
6018 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
6019 if (GET_CODE (static_chain_incoming_rtx) != REG)
6020 emit_move_insn (last_ptr, static_chain_incoming_rtx);
6021
6022 /* The following was moved from init_function_start.
6023 The move is supposed to make sdb output more accurate. */
6024 /* Indicate the beginning of the function body,
6025 as opposed to parm setup. */
6026 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
6027
6028 if (GET_CODE (get_last_insn ()) != NOTE)
6029 emit_note (NULL_PTR, NOTE_INSN_DELETED);
6030 parm_birth_insn = get_last_insn ();
6031
6032 context_display = 0;
6033 if (current_function_needs_context)
6034 {
6035 /* Fetch static chain values for containing functions. */
6036 tem = decl_function_context (current_function_decl);
6037 /* Copy the static chain pointer into a pseudo. If we have
6038 small register classes, copy the value from memory if
6039 static_chain_incoming_rtx is a REG. */
6040 if (tem)
6041 {
6042 /* If the static chain originally came in a register, put it back
6043 there, then move it out in the next insn. The reason for
6044 this peculiar code is to satisfy function integration. */
6045 if (SMALL_REGISTER_CLASSES
6046 && GET_CODE (static_chain_incoming_rtx) == REG)
6047 emit_move_insn (static_chain_incoming_rtx, last_ptr);
6048 last_ptr = copy_to_reg (static_chain_incoming_rtx);
6049 }
6050
6051 while (tem)
6052 {
6053 tree rtlexp = make_node (RTL_EXPR);
6054
6055 RTL_EXPR_RTL (rtlexp) = last_ptr;
6056 context_display = tree_cons (tem, rtlexp, context_display);
6057 tem = decl_function_context (tem);
6058 if (tem == 0)
6059 break;
6060 /* Chain thru stack frames, assuming pointer to next lexical frame
6061 is found at the place we always store it. */
6062 #ifdef FRAME_GROWS_DOWNWARD
6063 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
6064 #endif
6065 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
6066 memory_address (Pmode,
6067 last_ptr)));
6068
6069 /* If we are not optimizing, ensure that we know that this
6070 piece of context is live over the entire function. */
6071 if (! optimize)
6072 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
6073 save_expr_regs);
6074 }
6075 }
6076
6077 if (current_function_instrument_entry_exit)
6078 {
6079 rtx fun = DECL_RTL (current_function_decl);
6080 if (GET_CODE (fun) == MEM)
6081 fun = XEXP (fun, 0);
6082 else
6083 abort ();
6084 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
6085 fun, Pmode,
6086 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6087 0,
6088 hard_frame_pointer_rtx),
6089 Pmode);
6090 }
6091
6092 /* After the display initializations is where the tail-recursion label
6093 should go, if we end up needing one. Ensure we have a NOTE here
6094 since some things (like trampolines) get placed before this. */
6095 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
6096
6097 /* Evaluate now the sizes of any types declared among the arguments. */
6098 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
6099 {
6100 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
6101 EXPAND_MEMORY_USE_BAD);
6102 /* Flush the queue in case this parameter declaration has
6103 side-effects. */
6104 emit_queue ();
6105 }
6106
6107 /* Make sure there is a line number after the function entry setup code. */
6108 force_next_line_note ();
6109 }
6110 \f
6111 /* Undo the effects of init_dummy_function_start. */
6112 void
6113 expand_dummy_function_end ()
6114 {
6115 /* End any sequences that failed to be closed due to syntax errors. */
6116 while (in_sequence_p ())
6117 end_sequence ();
6118
6119 /* Outside function body, can't compute type's actual size
6120 until next function's body starts. */
6121
6122 free_after_parsing (cfun);
6123 free_after_compilation (cfun);
6124 free (cfun);
6125 cfun = 0;
6126 }
6127
6128 /* Call DOIT for each hard register used as a return value from
6129 the current function. */
6130
6131 void
6132 diddle_return_value (doit, arg)
6133 void (*doit) PARAMS ((rtx, void *));
6134 void *arg;
6135 {
6136 rtx outgoing = current_function_return_rtx;
6137
6138 if (! outgoing)
6139 return;
6140
6141 if (GET_CODE (outgoing) == REG
6142 && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
6143 {
6144 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6145 #ifdef FUNCTION_OUTGOING_VALUE
6146 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
6147 #else
6148 outgoing = FUNCTION_VALUE (type, current_function_decl);
6149 #endif
6150 /* If this is a BLKmode structure being returned in registers, then use
6151 the mode computed in expand_return. */
6152 if (GET_MODE (outgoing) == BLKmode)
6153 PUT_MODE (outgoing,
6154 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6155 }
6156
6157 if (GET_CODE (outgoing) == REG)
6158 (*doit) (outgoing, arg);
6159 else if (GET_CODE (outgoing) == PARALLEL)
6160 {
6161 int i;
6162
6163 for (i = 0; i < XVECLEN (outgoing, 0); i++)
6164 {
6165 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6166
6167 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6168 (*doit) (x, arg);
6169 }
6170 }
6171 }
6172
6173 static void
6174 do_clobber_return_reg (reg, arg)
6175 rtx reg;
6176 void *arg ATTRIBUTE_UNUSED;
6177 {
6178 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6179 }
6180
6181 void
6182 clobber_return_register ()
6183 {
6184 diddle_return_value (do_clobber_return_reg, NULL);
6185 }
6186
6187 static void
6188 do_use_return_reg (reg, arg)
6189 rtx reg;
6190 void *arg ATTRIBUTE_UNUSED;
6191 {
6192 emit_insn (gen_rtx_USE (VOIDmode, reg));
6193 }
6194
6195 void
6196 use_return_register ()
6197 {
6198 diddle_return_value (do_use_return_reg, NULL);
6199 }
6200
6201 /* Generate RTL for the end of the current function.
6202 FILENAME and LINE are the current position in the source file.
6203
6204 It is up to language-specific callers to do cleanups for parameters--
6205 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
6206
6207 void
6208 expand_function_end (filename, line, end_bindings)
6209 char *filename;
6210 int line;
6211 int end_bindings;
6212 {
6213 tree link;
6214
6215 #ifdef TRAMPOLINE_TEMPLATE
6216 static rtx initial_trampoline;
6217 #endif
6218
6219 finish_expr_for_function ();
6220
6221 #ifdef NON_SAVING_SETJMP
6222 /* Don't put any variables in registers if we call setjmp
6223 on a machine that fails to restore the registers. */
6224 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6225 {
6226 if (DECL_INITIAL (current_function_decl) != error_mark_node)
6227 setjmp_protect (DECL_INITIAL (current_function_decl));
6228
6229 setjmp_protect_args ();
6230 }
6231 #endif
6232
6233 /* Save the argument pointer if a save area was made for it. */
6234 if (arg_pointer_save_area)
6235 {
6236 /* arg_pointer_save_area may not be a valid memory address, so we
6237 have to check it and fix it if necessary. */
6238 rtx seq;
6239 start_sequence ();
6240 emit_move_insn (validize_mem (arg_pointer_save_area),
6241 virtual_incoming_args_rtx);
6242 seq = gen_sequence ();
6243 end_sequence ();
6244 emit_insn_before (seq, tail_recursion_reentry);
6245 }
6246
6247 /* Initialize any trampolines required by this function. */
6248 for (link = trampoline_list; link; link = TREE_CHAIN (link))
6249 {
6250 tree function = TREE_PURPOSE (link);
6251 rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6252 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6253 #ifdef TRAMPOLINE_TEMPLATE
6254 rtx blktramp;
6255 #endif
6256 rtx seq;
6257
6258 #ifdef TRAMPOLINE_TEMPLATE
6259 /* First make sure this compilation has a template for
6260 initializing trampolines. */
6261 if (initial_trampoline == 0)
6262 {
6263 end_temporary_allocation ();
6264 initial_trampoline
6265 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6266 resume_temporary_allocation ();
6267
6268 ggc_add_rtx_root (&initial_trampoline, 1);
6269 }
6270 #endif
6271
6272 /* Generate insns to initialize the trampoline. */
6273 start_sequence ();
6274 tramp = round_trampoline_addr (XEXP (tramp, 0));
6275 #ifdef TRAMPOLINE_TEMPLATE
6276 blktramp = change_address (initial_trampoline, BLKmode, tramp);
6277 emit_block_move (blktramp, initial_trampoline,
6278 GEN_INT (TRAMPOLINE_SIZE),
6279 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
6280 #endif
6281 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6282 seq = get_insns ();
6283 end_sequence ();
6284
6285 /* Put those insns at entry to the containing function (this one). */
6286 emit_insns_before (seq, tail_recursion_reentry);
6287 }
6288
6289 /* If we are doing stack checking and this function makes calls,
6290 do a stack probe at the start of the function to ensure we have enough
6291 space for another stack frame. */
6292 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6293 {
6294 rtx insn, seq;
6295
6296 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6297 if (GET_CODE (insn) == CALL_INSN)
6298 {
6299 start_sequence ();
6300 probe_stack_range (STACK_CHECK_PROTECT,
6301 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6302 seq = get_insns ();
6303 end_sequence ();
6304 emit_insns_before (seq, tail_recursion_reentry);
6305 break;
6306 }
6307 }
6308
6309 /* Warn about unused parms if extra warnings were specified. */
6310 if (warn_unused && extra_warnings)
6311 {
6312 tree decl;
6313
6314 for (decl = DECL_ARGUMENTS (current_function_decl);
6315 decl; decl = TREE_CHAIN (decl))
6316 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6317 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6318 warning_with_decl (decl, "unused parameter `%s'");
6319 }
6320
6321 /* Delete handlers for nonlocal gotos if nothing uses them. */
6322 if (nonlocal_goto_handler_slots != 0
6323 && ! current_function_has_nonlocal_label)
6324 delete_handlers ();
6325
6326 /* End any sequences that failed to be closed due to syntax errors. */
6327 while (in_sequence_p ())
6328 end_sequence ();
6329
6330 /* Outside function body, can't compute type's actual size
6331 until next function's body starts. */
6332 immediate_size_expand--;
6333
6334 clear_pending_stack_adjust ();
6335 do_pending_stack_adjust ();
6336
6337 /* Mark the end of the function body.
6338 If control reaches this insn, the function can drop through
6339 without returning a value. */
6340 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6341
6342 /* Must mark the last line number note in the function, so that the test
6343 coverage code can avoid counting the last line twice. This just tells
6344 the code to ignore the immediately following line note, since there
6345 already exists a copy of this note somewhere above. This line number
6346 note is still needed for debugging though, so we can't delete it. */
6347 if (flag_test_coverage)
6348 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6349
6350 /* Output a linenumber for the end of the function.
6351 SDB depends on this. */
6352 emit_line_note_force (filename, line);
6353
6354 /* Output the label for the actual return from the function,
6355 if one is expected. This happens either because a function epilogue
6356 is used instead of a return instruction, or because a return was done
6357 with a goto in order to run local cleanups, or because of pcc-style
6358 structure returning. */
6359
6360 if (return_label)
6361 {
6362 /* Before the return label, clobber the return registers so that
6363 	 they are not propagated live to the rest of the function.  This
6364 can only happen with functions that drop through; if there had
6365 been a return statement, there would have either been a return
6366 rtx, or a jump to the return label. */
6367 clobber_return_register ();
6368
6369 emit_label (return_label);
6370 }
6371
6372 /* C++ uses this. */
6373 if (end_bindings)
6374 expand_end_bindings (0, 0, 0);
6375
6376 /* Now handle any leftover exception regions that may have been
6377 created for the parameters. */
6378 {
6379 rtx last = get_last_insn ();
6380 rtx label;
6381
6382 expand_leftover_cleanups ();
6383
6384 /* If there are any catch_clauses remaining, output them now. */
6385 emit_insns (catch_clauses);
6386 catch_clauses = NULL_RTX;
6387     /* If the above emitted any code, make sure we jump around it.  */
6388 if (last != get_last_insn ())
6389 {
6390 label = gen_label_rtx ();
6391 last = emit_jump_insn_after (gen_jump (label), last);
6392 last = emit_barrier_after (last);
6393 emit_label (label);
6394 }
6395 }
6396
6397 if (current_function_instrument_entry_exit)
6398 {
6399 rtx fun = DECL_RTL (current_function_decl);
6400 if (GET_CODE (fun) == MEM)
6401 fun = XEXP (fun, 0);
6402 else
6403 abort ();
6404 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6405 fun, Pmode,
6406 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6407 0,
6408 hard_frame_pointer_rtx),
6409 Pmode);
6410 }
6411
6412 /* If we had calls to alloca, and this machine needs
6413 an accurate stack pointer to exit the function,
6414 insert some code to save and restore the stack pointer. */
6415 #ifdef EXIT_IGNORE_STACK
6416 if (! EXIT_IGNORE_STACK)
6417 #endif
6418 if (current_function_calls_alloca)
6419 {
6420 rtx tem = 0;
6421
6422 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6423 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6424 }
6425
6426 /* If scalar return value was computed in a pseudo-reg,
6427 copy that to the hard return register. */
6428 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6429 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6430 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6431 >= FIRST_PSEUDO_REGISTER))
6432 {
6433 rtx real_decl_result;
6434
6435 #ifdef FUNCTION_OUTGOING_VALUE
6436 real_decl_result
6437 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6438 current_function_decl);
6439 #else
6440 real_decl_result
6441 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6442 current_function_decl);
6443 #endif
6444 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6445 /* If this is a BLKmode structure being returned in registers, then use
6446 the mode computed in expand_return. */
6447 if (GET_MODE (real_decl_result) == BLKmode)
6448 PUT_MODE (real_decl_result,
6449 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6450 emit_move_insn (real_decl_result,
6451 DECL_RTL (DECL_RESULT (current_function_decl)));
6452
6453 /* The delay slot scheduler assumes that current_function_return_rtx
6454 holds the hard register containing the return value, not a temporary
6455 pseudo. */
6456 current_function_return_rtx = real_decl_result;
6457 }
6458
6459 /* If returning a structure, arrange to return the address of the value
6460 in a place where debuggers expect to find it.
6461
6462 If returning a structure PCC style,
6463 the caller also depends on this value.
6464 And current_function_returns_pcc_struct is not necessarily set. */
6465 if (current_function_returns_struct
6466 || current_function_returns_pcc_struct)
6467 {
6468 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6469 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6470 #ifdef FUNCTION_OUTGOING_VALUE
6471 rtx outgoing
6472 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6473 current_function_decl);
6474 #else
6475 rtx outgoing
6476 = FUNCTION_VALUE (build_pointer_type (type),
6477 current_function_decl);
6478 #endif
6479
6480 /* Mark this as a function return value so integrate will delete the
6481 assignment and USE below when inlining this function. */
6482 REG_FUNCTION_VALUE_P (outgoing) = 1;
6483
6484 emit_move_insn (outgoing, value_address);
6485 }
6486
6487 /* ??? This should no longer be necessary since stupid is no longer with
6488 us, but there are some parts of the compiler (eg reload_combine, and
6489 sh mach_dep_reorg) that still try and compute their own lifetime info
6490 instead of using the general framework. */
6491 use_return_register ();
6492
6493 /* If this is an implementation of __throw, do what's necessary to
6494 communicate between __builtin_eh_return and the epilogue. */
6495 expand_eh_return ();
6496
6497 /* Output a return insn if we are using one.
6498 Otherwise, let the rtl chain end here, to drop through
6499 into the epilogue. */
6500
6501 #ifdef HAVE_return
6502 if (HAVE_return)
6503 {
6504 emit_jump_insn (gen_return ());
6505 emit_barrier ();
6506 }
6507 #endif
6508
6509 /* Fix up any gotos that jumped out to the outermost
6510 binding level of the function.
6511 Must follow emitting RETURN_LABEL. */
6512
6513 /* If you have any cleanups to do at this point,
6514 and they need to create temporary variables,
6515 then you will lose. */
6516 expand_fixups (get_insns ());
6517 }
6518 \f
6519 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6520 or a single insn). */
6521
6522 static int *
6523 record_insns (insns)
6524 rtx insns;
6525 {
6526 int *vec;
6527
6528 if (GET_CODE (insns) == SEQUENCE)
6529 {
6530 int len = XVECLEN (insns, 0);
6531 vec = (int *) oballoc ((len + 1) * sizeof (int));
6532 vec[len] = 0;
6533 while (--len >= 0)
6534 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6535 }
6536 else
6537 {
6538 vec = (int *) oballoc (2 * sizeof (int));
6539 vec[0] = INSN_UID (insns);
6540 vec[1] = 0;
6541 }
6542 return vec;
6543 }
6544
6545 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6546
6547 static int
6548 contains (insn, vec)
6549 rtx insn;
6550 int *vec;
6551 {
6552 register int i, j;
6553
6554 if (GET_CODE (insn) == INSN
6555 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6556 {
6557 int count = 0;
6558 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6559 for (j = 0; vec[j]; j++)
6560 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6561 count++;
6562 return count;
6563 }
6564 else
6565 {
6566 for (j = 0; vec[j]; j++)
6567 if (INSN_UID (insn) == vec[j])
6568 return 1;
6569 }
6570 return 0;
6571 }
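/* Hypothetical usage sketch (not from the original source), assuming SEQ
   and INSN are in scope: record_insns returns a zero-terminated array of
   INSN_UIDs, and contains checks an insn (or the insns inside its SEQUENCE)
   against that array, which is how prologue_epilogue_contains below
   consults the prologue and epilogue maps.  */
#if 0
  int *prologue_uids = record_insns (seq);
  int hits = contains (insn, prologue_uids);
#endif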
6572
6573 int
6574 prologue_epilogue_contains (insn)
6575 rtx insn;
6576 {
6577 if (prologue && contains (insn, prologue))
6578 return 1;
6579 if (epilogue && contains (insn, epilogue))
6580 return 1;
6581 return 0;
6582 }
6583
6584 /* Insert gen_return at the end of block BB. This also means updating
6585 block_for_insn appropriately. */
6586
6587 static void
6588 emit_return_into_block (bb)
6589 basic_block bb;
6590 {
6591 rtx p, end;
6592
6593 end = emit_jump_insn_after (gen_return (), bb->end);
6594 p = NEXT_INSN (bb->end);
6595 while (1)
6596 {
6597 set_block_for_insn (p, bb);
6598 if (p == end)
6599 break;
6600 p = NEXT_INSN (p);
6601 }
6602 bb->end = end;
6603 }
6604
6605 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6606 this into place with notes indicating where the prologue ends and where
6607 the epilogue begins. Update the basic block information when possible. */
6608
6609 void
6610 thread_prologue_and_epilogue_insns (f)
6611 rtx f ATTRIBUTE_UNUSED;
6612 {
6613   int inserted = 0;
6614 edge e;
6615 rtx seq;
6616
6617 #ifdef HAVE_prologue
6618 if (HAVE_prologue)
6619 {
6620 start_sequence ();
6621       seq = gen_prologue ();
6622 emit_insn (seq);
6623
6624 /* Retain a map of the prologue insns. */
6625 if (GET_CODE (seq) != SEQUENCE)
6626 seq = get_insns ();
6627 prologue = record_insns (seq);
6628
6629 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6630 seq = gen_sequence ();
6631 end_sequence ();
6632
6633 /* If optimization is off, and perhaps in an empty function,
6634 the entry block will have no successors. */
6635 if (ENTRY_BLOCK_PTR->succ)
6636 {
6637 	  /* Can't deal with multiple successors of the entry block.  */
6638 if (ENTRY_BLOCK_PTR->succ->succ_next)
6639 abort ();
6640
6641 insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
6642 	  inserted = 1;
6643 }
6644 else
6645 emit_insn_after (seq, f);
6646 }
6647 #endif
6648
6649 /* If the exit block has no non-fake predecessors, we don't need
6650 an epilogue. */
6651 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6652 if ((e->flags & EDGE_FAKE) == 0)
6653 break;
6654 if (e == NULL)
6655 goto epilogue_done;
6656
6657 #ifdef HAVE_return
6658 if (optimize && HAVE_return)
6659 {
6660 /* If we're allowed to generate a simple return instruction,
6661 then by definition we don't need a full epilogue. Examine
6662 the block that falls through to EXIT. If it does not
6663 contain any code, examine its predecessors and try to
6664 emit (conditional) return instructions. */
6665
6666 basic_block last;
6667 edge e_next;
6668 rtx label;
6669
6670 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6671 if (e->flags & EDGE_FALLTHRU)
6672 break;
6673 if (e == NULL)
6674 goto epilogue_done;
6675 last = e->src;
6676
6677 /* Verify that there are no active instructions in the last block. */
6678 label = last->end;
6679 while (label && GET_CODE (label) != CODE_LABEL)
6680 {
6681 if (active_insn_p (label))
6682 break;
6683 label = PREV_INSN (label);
6684 }
6685
6686 if (last->head == label && GET_CODE (label) == CODE_LABEL)
6687 {
6688 for (e = last->pred; e ; e = e_next)
6689 {
6690 basic_block bb = e->src;
6691 rtx jump;
6692
6693 e_next = e->pred_next;
6694 if (bb == ENTRY_BLOCK_PTR)
6695 continue;
6696
6697 jump = bb->end;
6698 if (GET_CODE (jump) != JUMP_INSN)
6699 continue;
6700
6701 /* If we have an unconditional jump, we can replace that
6702 with a simple return instruction. */
6703 if (simplejump_p (jump))
6704 {
6705 emit_return_into_block (bb);
6706 flow_delete_insn (jump);
6707 }
6708
6709 /* If we have a conditional jump, we can try to replace
6710 that with a conditional return instruction. */
6711 else if (condjump_p (jump))
6712 {
6713 rtx ret, *loc;
6714
6715 ret = SET_SRC (PATTERN (jump));
6716 if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
6717 loc = &XEXP (ret, 1);
6718 else
6719 loc = &XEXP (ret, 2);
6720 ret = gen_rtx_RETURN (VOIDmode);
6721
6722 if (! validate_change (jump, loc, ret, 0))
6723 continue;
6724 if (JUMP_LABEL (jump))
6725 LABEL_NUSES (JUMP_LABEL (jump))--;
6726 }
6727 else
6728 continue;
6729
6730 /* Fix up the CFG for the successful change we just made. */
6731 remove_edge (e);
6732 make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
6733 }
6734 }
6735
6736 /* Emit a return insn for the exit fallthru block. Whether
6737 this is still reachable will be determined later. */
6738
6739 emit_barrier_after (last->end);
6740 emit_return_into_block (last);
6741 goto epilogue_done;
6742 }
6743 #endif
6744 #ifdef HAVE_epilogue
6745 if (HAVE_epilogue)
6746 {
6747 /* Find the edge that falls through to EXIT. Other edges may exist
6748 due to RETURN instructions, but those don't need epilogues.
6749 There really shouldn't be a mixture -- either all should have
6750 been converted or none, however... */
6751
6752 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
6753 if (e->flags & EDGE_FALLTHRU)
6754 break;
6755 if (e == NULL)
6756 goto epilogue_done;
6757
6758 start_sequence ();
6759 emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
6760
6761 seq = gen_epilogue ();
6762 emit_jump_insn (seq);
6763
6764 /* Retain a map of the epilogue insns. */
6765 if (GET_CODE (seq) != SEQUENCE)
6766 seq = get_insns ();
6767 epilogue = record_insns (seq);
6768
6769 seq = gen_sequence ();
6770       end_sequence ();
6771
6772 insert_insn_on_edge (seq, e);
6773       inserted = 1;
6774 }
6775 #endif
6776 epilogue_done:
6777
6778   if (inserted)
6779 commit_edge_insertions ();
6780 }
6781
6782 /* Reposition the prologue-end and epilogue-begin notes after instruction
6783 scheduling and delayed branch scheduling. */
6784
6785 void
6786 reposition_prologue_and_epilogue_notes (f)
6787 rtx f ATTRIBUTE_UNUSED;
6788 {
6789 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6790 /* Reposition the prologue and epilogue notes. */
6791 if (n_basic_blocks)
6792 {
6793 int len;
6794
6795 if (prologue)
6796 {
6797 register rtx insn, note = 0;
6798
6799 /* Scan from the beginning until we reach the last prologue insn.
6800 We apparently can't depend on basic_block_{head,end} after
6801 reorg has run. */
6802 for (len = 0; prologue[len]; len++)
6803 ;
6804 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6805 {
6806 if (GET_CODE (insn) == NOTE)
6807 {
6808 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6809 note = insn;
6810 }
6811 else if ((len -= contains (insn, prologue)) == 0)
6812 {
6813 rtx next;
6814 /* Find the prologue-end note if we haven't already, and
6815 move it to just after the last prologue insn. */
6816 if (note == 0)
6817 {
6818 for (note = insn; (note = NEXT_INSN (note));)
6819 if (GET_CODE (note) == NOTE
6820 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6821 break;
6822 }
6823
6824 next = NEXT_INSN (note);
6825
6826 /* Whether or not we can depend on BLOCK_HEAD,
6827 attempt to keep it up-to-date. */
6828 if (BLOCK_HEAD (0) == note)
6829 BLOCK_HEAD (0) = next;
6830
6831 remove_insn (note);
6832 add_insn_after (note, insn);
6833 }
6834 }
6835 }
6836
6837 if (epilogue)
6838 {
6839 register rtx insn, note = 0;
6840
6841 /* Scan from the end until we reach the first epilogue insn.
6842 We apparently can't depend on basic_block_{head,end} after
6843 reorg has run. */
6844 for (len = 0; epilogue[len]; len++)
6845 ;
6846 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6847 {
6848 if (GET_CODE (insn) == NOTE)
6849 {
6850 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6851 note = insn;
6852 }
6853 else if ((len -= contains (insn, epilogue)) == 0)
6854 {
6855 /* Find the epilogue-begin note if we haven't already, and
6856 move it to just before the first epilogue insn. */
6857 if (note == 0)
6858 {
6859 for (note = insn; (note = PREV_INSN (note));)
6860 if (GET_CODE (note) == NOTE
6861 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6862 break;
6863 }
6864
6865 /* Whether or not we can depend on BLOCK_HEAD,
6866 attempt to keep it up-to-date. */
6867 if (n_basic_blocks
6868 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6869 BLOCK_HEAD (n_basic_blocks-1) = note;
6870
6871 remove_insn (note);
6872 add_insn_before (note, insn);
6873 }
6874 }
6875 }
6876 }
6877 #endif /* HAVE_prologue or HAVE_epilogue */
6878 }
6879
6880 /* Mark T for GC. */
6881
6882 static void
6883 mark_temp_slot (t)
6884 struct temp_slot *t;
6885 {
6886 while (t)
6887 {
6888 ggc_mark_rtx (t->slot);
6889 ggc_mark_rtx (t->address);
6890 ggc_mark_tree (t->rtl_expr);
6891
6892 t = t->next;
6893 }
6894 }
6895
6896 /* Mark P for GC. */
6897
6898 static void
6899 mark_function_status (p)
6900 struct function *p;
6901 {
6902 int i;
6903 rtx *r;
6904
6905 if (p == 0)
6906 return;
6907
6908 ggc_mark_rtx (p->arg_offset_rtx);
6909
6910 if (p->x_parm_reg_stack_loc)
6911 for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
6912 i > 0; --i, ++r)
6913 ggc_mark_rtx (*r);
6914
6915 ggc_mark_rtx (p->return_rtx);
6916 ggc_mark_rtx (p->x_cleanup_label);
6917 ggc_mark_rtx (p->x_return_label);
6918 ggc_mark_rtx (p->x_save_expr_regs);
6919 ggc_mark_rtx (p->x_stack_slot_list);
6920 ggc_mark_rtx (p->x_parm_birth_insn);
6921 ggc_mark_rtx (p->x_tail_recursion_label);
6922 ggc_mark_rtx (p->x_tail_recursion_reentry);
6923 ggc_mark_rtx (p->internal_arg_pointer);
6924 ggc_mark_rtx (p->x_arg_pointer_save_area);
6925 ggc_mark_tree (p->x_rtl_expr_chain);
6926 ggc_mark_rtx (p->x_last_parm_insn);
6927 ggc_mark_tree (p->x_context_display);
6928 ggc_mark_tree (p->x_trampoline_list);
6929 ggc_mark_rtx (p->epilogue_delay_list);
6930
6931 mark_temp_slot (p->x_temp_slots);
6932
6933 {
6934 struct var_refs_queue *q = p->fixup_var_refs_queue;
6935 while (q)
6936 {
6937 ggc_mark_rtx (q->modified);
6938 q = q->next;
6939 }
6940 }
6941
6942 ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
6943 ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
6944 ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
6945 ggc_mark_tree (p->x_nonlocal_labels);
6946 }
6947
6948 /* Mark the function chain ARG (which is really a struct function **)
6949 for GC. */
6950
6951 static void
6952 mark_function_chain (arg)
6953 void *arg;
6954 {
6955 struct function *f = *(struct function **) arg;
6956
6957 for (; f; f = f->next_global)
6958 {
6959 ggc_mark_tree (f->decl);
6960
6961 mark_function_status (f);
6962 mark_eh_status (f->eh);
6963 mark_stmt_status (f->stmt);
6964 mark_expr_status (f->expr);
6965 mark_emit_status (f->emit);
6966 mark_varasm_status (f->varasm);
6967
6968 if (mark_machine_status)
6969 (*mark_machine_status) (f);
6970 if (mark_lang_status)
6971 (*mark_lang_status) (f);
6972
6973 if (f->original_arg_vector)
6974 ggc_mark_rtvec ((rtvec) f->original_arg_vector);
6975 if (f->original_decl_initial)
6976 ggc_mark_tree (f->original_decl_initial);
6977 }
6978 }
6979
6980 /* Called once, at initialization, to initialize function.c. */
6981
6982 void
6983 init_function_once ()
6984 {
6985 ggc_add_root (&all_functions, 1, sizeof all_functions,
6986 mark_function_chain);
6987 }