Commit | Line | Data |
---|---|---|
6f086dfc | 1 | /* Expands front end tree to back end RTL for GNU C-Compiler |
422c8f63 | 2 | Copyright (C) 1987, 88, 89, 91, 92, 93, 1994 Free Software Foundation, Inc. |
6f086dfc RS |
3 | |
4 | This file is part of GNU CC. | |
5 | ||
6 | GNU CC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GNU CC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GNU CC; see the file COPYING. If not, write to | |
18 | the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ | |
19 | ||
20 | ||
21 | /* This file handles the generation of rtl code from tree structure | |
22 | at the level of the function as a whole. | |
23 | It creates the rtl expressions for parameters and auto variables | |
24 | and has full responsibility for allocating stack slots. | |
25 | ||
26 | `expand_function_start' is called at the beginning of a function, | |
27 | before the function body is parsed, and `expand_function_end' is | |
28 | called after parsing the body. | |
29 | ||
30 | Call `assign_stack_local' to allocate a stack slot for a local variable. | |
31 | This is usually done during the RTL generation for the function body, | |
32 | but it can also be done in the reload pass when a pseudo-register does | |
33 | not get a hard register. | |
34 | ||
35 | Call `put_var_into_stack' when you learn, belatedly, that a variable | |
36 | previously given a pseudo-register must in fact go in the stack. | |
37 | This function changes the DECL_RTL to be a stack slot instead of a reg, | |
38 | then scans all the RTL instructions so far generated to correct them. */ | |
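To make the flow above concrete, here is a minimal sketch of how a front end might drive these entry points. The argument lists of `expand_function_start' and `expand_function_end' are assumptions for illustration (see their definitions for the real interface); `expand_one_function' and `var_decl' are hypothetical names.

```c
/* Sketch only -- not part of this file.  Argument lists of the
   expand_function_* calls are assumed for illustration.  */
void
expand_one_function (tree fndecl)
{
  expand_function_start (fndecl, 0);      /* before the body is parsed */

  /* While expanding the body, locals get frame space on demand: */
  rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

  /* If we belatedly learn that a register variable needs memory: */
  put_var_into_stack (var_decl);          /* var_decl: hypothetical VAR_DECL
                                             whose address was taken */

  expand_function_end (input_filename, lineno);   /* after the body */
}
```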
39 | ||
40 | #include "config.h" | |
41 | ||
42 | #include <stdio.h> | |
43 | ||
44 | #include "rtl.h" | |
45 | #include "tree.h" | |
46 | #include "flags.h" | |
47 | #include "function.h" | |
48 | #include "insn-flags.h" | |
49 | #include "expr.h" | |
50 | #include "insn-codes.h" | |
51 | #include "regs.h" | |
52 | #include "hard-reg-set.h" | |
53 | #include "insn-config.h" | |
54 | #include "recog.h" | |
55 | #include "output.h" | |
bdac5f58 | 56 | #include "basic-block.h" |
c20bf1f3 JB |
57 | #include "obstack.h" |
58 | #include "bytecode.h" | |
6f086dfc | 59 | |
293e3de4 RS |
60 | /* Some systems use __main in a way incompatible with its use in gcc; in these | |
61 | cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to | |
62 | give the same symbol without quotes for an alternative entry point. You | |
63 | must define both, or neither. */ | |
64 | #ifndef NAME__MAIN | |
65 | #define NAME__MAIN "__main" | |
66 | #define SYMBOL__MAIN __main | |
67 | #endif | |
68 | ||
6f086dfc RS |
69 | /* Round a value down to the largest multiple of the required alignment | |
70 | that does not exceed it. Avoid using division in case the value is | |
71 | negative. Assume the alignment is a power of two. */ | |
72 | #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1)) | |
73 | ||
74 | /* Similar, but round to the next highest integer that meets the | |
75 | alignment. */ | |
76 | #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1)) | |
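A quick, self-contained check of what these masks compute (assuming two's-complement integers, as the note about negative values implies):

```c
#include <assert.h>

#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

int
main (void)
{
  /* Rounding toward minus infinity works even for the negative frame
     offsets used when the frame grows downward -- this is why the
     macros mask instead of dividing.  */
  assert (FLOOR_ROUND (-20, 16) == -32);
  assert (FLOOR_ROUND (32, 16) == 32);
  assert (CEIL_ROUND (20, 16) == 32);
  assert (CEIL_ROUND (32, 16) == 32);
  return 0;
}
```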
77 | ||
78 | /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp | |
79 | during rtl generation. If they are different register numbers, this is | |
80 | always true. It may also be true if | |
81 | FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl | |
82 | generation. See fix_lexical_addr for details. */ | |
83 | ||
84 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
85 | #define NEED_SEPARATE_AP | |
86 | #endif | |
87 | ||
88 | /* Number of bytes of args popped by function being compiled on its return. | |
89 | Zero if no bytes are to be popped. | |
90 | May affect compilation of return insn or of function epilogue. */ | |
91 | ||
92 | int current_function_pops_args; | |
93 | ||
94 | /* Nonzero if function being compiled needs to be given an address | |
95 | where the value should be stored. */ | |
96 | ||
97 | int current_function_returns_struct; | |
98 | ||
99 | /* Nonzero if function being compiled needs to | |
100 | return the address of where it has put a structure value. */ | |
101 | ||
102 | int current_function_returns_pcc_struct; | |
103 | ||
104 | /* Nonzero if function being compiled needs to be passed a static chain. */ | |
105 | ||
106 | int current_function_needs_context; | |
107 | ||
108 | /* Nonzero if function being compiled can call setjmp. */ | |
109 | ||
110 | int current_function_calls_setjmp; | |
111 | ||
112 | /* Nonzero if function being compiled can call longjmp. */ | |
113 | ||
114 | int current_function_calls_longjmp; | |
115 | ||
116 | /* Nonzero if function being compiled receives nonlocal gotos | |
117 | from nested functions. */ | |
118 | ||
119 | int current_function_has_nonlocal_label; | |
120 | ||
8634413a JW |
121 | /* Nonzero if function being compiled has nonlocal gotos to parent |
122 | function. */ | |
123 | ||
124 | int current_function_has_nonlocal_goto; | |
125 | ||
6f086dfc RS |
126 | /* Nonzero if function being compiled contains nested functions. */ |
127 | ||
128 | int current_function_contains_functions; | |
129 | ||
130 | /* Nonzero if function being compiled can call alloca, | |
131 | either as a subroutine or builtin. */ | |
132 | ||
133 | int current_function_calls_alloca; | |
134 | ||
135 | /* Nonzero if the current function returns a pointer type. */ | |
136 | ||
137 | int current_function_returns_pointer; | |
138 | ||
139 | /* If some insns can be deferred to the delay slots of the epilogue, the | |
140 | delay list for them is recorded here. */ | |
141 | ||
142 | rtx current_function_epilogue_delay_list; | |
143 | ||
144 | /* If function's args have a fixed size, this is that size, in bytes. | |
145 | Otherwise, it is -1. | |
146 | May affect compilation of return insn or of function epilogue. */ | |
147 | ||
148 | int current_function_args_size; | |
149 | ||
150 | /* # bytes the prologue should push and pretend that the caller pushed them. | |
151 | The prologue must do this, but only if parms can be passed in registers. */ | |
152 | ||
153 | int current_function_pretend_args_size; | |
154 | ||
155 | /* # of bytes of outgoing arguments required to be pushed by the prologue. | |
156 | If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined | |
157 | and no stack adjusts will be done on function calls. */ | |
158 | ||
159 | int current_function_outgoing_args_size; | |
160 | ||
161 | /* This is the offset from the arg pointer to the place where the first | |
162 | anonymous arg can be found, if there is one. */ | |
163 | ||
164 | rtx current_function_arg_offset_rtx; | |
165 | ||
166 | /* Nonzero if current function uses varargs.h or equivalent. | |
167 | Zero for functions that use stdarg.h. */ | |
168 | ||
169 | int current_function_varargs; | |
170 | ||
171 | /* Quantities of various kinds of registers | |
172 | used for the current function's args. */ | |
173 | ||
174 | CUMULATIVE_ARGS current_function_args_info; | |
175 | ||
176 | /* Name of function now being compiled. */ | |
177 | ||
178 | char *current_function_name; | |
179 | ||
180 | /* If non-zero, an RTL expression for the location at which the current | |
181 | function returns its result. Always equal to | |
182 | DECL_RTL (DECL_RESULT (current_function_decl)), but provided | |
183 | independently of the tree structures. */ | |
184 | ||
185 | rtx current_function_return_rtx; | |
186 | ||
187 | /* Nonzero if the current function uses the constant pool. */ | |
188 | ||
189 | int current_function_uses_const_pool; | |
190 | ||
191 | /* Nonzero if the current function uses pic_offset_table_rtx. */ | |
192 | int current_function_uses_pic_offset_table; | |
193 | ||
194 | /* The arg pointer hard register, or the pseudo into which it was copied. */ | |
195 | rtx current_function_internal_arg_pointer; | |
196 | ||
197 | /* The FUNCTION_DECL for an inline function currently being expanded. */ | |
198 | tree inline_function_decl; | |
199 | ||
200 | /* Number of function calls seen so far in current function. */ | |
201 | ||
202 | int function_call_count; | |
203 | ||
204 | /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels | |
205 | (labels to which there can be nonlocal gotos from nested functions) | |
206 | in this function. */ | |
207 | ||
208 | tree nonlocal_labels; | |
209 | ||
210 | /* RTX for stack slot that holds the current handler for nonlocal gotos. | |
211 | Zero when function does not have nonlocal labels. */ | |
212 | ||
213 | rtx nonlocal_goto_handler_slot; | |
214 | ||
215 | /* RTX for stack slot that holds the stack pointer value to restore | |
216 | for a nonlocal goto. | |
217 | Zero when function does not have nonlocal labels. */ | |
218 | ||
219 | rtx nonlocal_goto_stack_level; | |
220 | ||
221 | /* Label that will go on parm cleanup code, if any. | |
222 | Jumping to this label runs cleanup code for parameters, if | |
223 | such code must be run. Following this code is the logical return label. */ | |
224 | ||
225 | rtx cleanup_label; | |
226 | ||
227 | /* Label that will go on function epilogue. | |
228 | Jumping to this label serves as a "return" instruction | |
229 | on machines which require execution of the epilogue on all returns. */ | |
230 | ||
231 | rtx return_label; | |
232 | ||
233 | /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs. | |
234 | So we can mark them all live at the end of the function, if nonopt. */ | |
235 | rtx save_expr_regs; | |
236 | ||
237 | /* List (chain of EXPR_LISTs) of all stack slots in this function. | |
238 | Made for the sake of unshare_all_rtl. */ | |
239 | rtx stack_slot_list; | |
240 | ||
241 | /* Chain of all RTL_EXPRs that have insns in them. */ | |
242 | tree rtl_expr_chain; | |
243 | ||
244 | /* Label to jump back to for tail recursion, or 0 if we have | |
245 | not yet needed one for this function. */ | |
246 | rtx tail_recursion_label; | |
247 | ||
248 | /* Place after which to insert the tail_recursion_label if we need one. */ | |
249 | rtx tail_recursion_reentry; | |
250 | ||
251 | /* Location at which to save the argument pointer if it will need to be | |
252 | referenced. There are two cases where this is done: if nonlocal gotos | |
253 | exist, or if vars stored at an offset from the argument pointer will be | |
254 | needed by inner routines. */ | |
255 | ||
256 | rtx arg_pointer_save_area; | |
257 | ||
258 | /* Offset to end of allocated area of stack frame. | |
259 | If stack grows down, this is the address of the last stack slot allocated. | |
260 | If stack grows up, this is the address for the next slot. */ | |
261 | int frame_offset; | |
262 | ||
263 | /* List (chain of TREE_LISTs) of static chains for containing functions. | |
264 | Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx | |
265 | in an RTL_EXPR in the TREE_VALUE. */ | |
266 | static tree context_display; | |
267 | ||
268 | /* List (chain of TREE_LISTs) of trampolines for nested functions. | |
269 | The trampoline sets up the static chain and jumps to the function. | |
270 | We supply the trampoline's address when the function's address is requested. | |
271 | ||
272 | Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx | |
273 | in an RTL_EXPR in the TREE_VALUE. */ | |
274 | static tree trampoline_list; | |
275 | ||
276 | /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */ | |
277 | static rtx parm_birth_insn; | |
278 | ||
279 | #if 0 | |
280 | /* Nonzero if a stack slot has been generated whose address is not | |
281 | actually valid. It means that the generated rtl must all be scanned | |
282 | to detect and correct the invalid addresses where they occur. */ | |
283 | static int invalid_stack_slot; | |
284 | #endif | |
285 | ||
286 | /* Last insn of those whose job was to put parms into their nominal homes. */ | |
287 | static rtx last_parm_insn; | |
288 | ||
289 | /* 1 + last pseudo register number used for loading a copy | |
290 | of a parameter of this function. */ | |
291 | static int max_parm_reg; | |
292 | ||
293 | /* Vector indexed by REGNO, containing location on stack in which | |
294 | to put the parm which is nominally in pseudo register REGNO, | |
295 | if we discover that that parm must go in the stack. */ | |
296 | static rtx *parm_reg_stack_loc; | |
297 | ||
298 | #if 0 /* Turned off because 0 seems to work just as well. */ | |
299 | /* Cleanup lists are required for binding levels regardless of whether | |
300 | that binding level has cleanups or not. This node serves as the | |
301 | cleanup list whenever an empty list is required. */ | |
302 | static tree empty_cleanup_list; | |
303 | #endif | |
304 | ||
305 | /* Nonzero once virtual register instantiation has been done. | |
306 | assign_stack_local uses frame_pointer_rtx when this is nonzero. */ | |
307 | static int virtuals_instantiated; | |
308 | ||
46766466 RS |
309 | /* These variables hold pointers to functions to |
310 | save and restore machine-specific data, | |
311 | in push_function_context and pop_function_context. */ | |
312 | void (*save_machine_status) (); | |
313 | void (*restore_machine_status) (); | |
314 | ||
6f086dfc RS |
315 | /* Nonzero if we need to distinguish between the return value of this function |
316 | and the return value of a function called by this function. This helps | |
317 | integrate.c. */ | |
318 | ||
319 | extern int rtx_equal_function_value_matters; | |
e7a84011 | 320 | extern tree sequence_rtl_expr; |
c20bf1f3 JB |
321 | extern tree bc_runtime_type_code (); |
322 | extern rtx bc_build_calldesc (); | |
323 | extern char *bc_emit_trampoline (); | |
324 | extern char *bc_end_function (); | |
6f086dfc RS |
325 | |
326 | void fixup_gotos (); | |
327 | ||
328 | static tree round_down (); | |
329 | static rtx round_trampoline_addr (); | |
330 | static rtx fixup_stack_1 (); | |
293e3de4 | 331 | static void put_reg_into_stack (); |
6f086dfc RS |
332 | static void fixup_var_refs (); |
333 | static void fixup_var_refs_insns (); | |
334 | static void fixup_var_refs_1 (); | |
335 | static void optimize_bit_field (); | |
336 | static void instantiate_decls (); | |
337 | static void instantiate_decls_1 (); | |
5a73491b | 338 | static void instantiate_decl (); |
6f086dfc RS |
339 | static int instantiate_virtual_regs_1 (); |
340 | static rtx fixup_memory_subreg (); | |
341 | static rtx walk_fixup_memory_subreg (); | |
342 | \f | |
343 | /* In order to evaluate some expressions, such as function calls returning | |
344 | structures in memory, we need to temporarily allocate stack locations. | |
345 | We record each allocated temporary in the following structure. | |
346 | ||
347 | Associated with each temporary slot is a nesting level. When we pop up | |
348 | one level, all temporaries associated with the previous level are freed. | |
349 | Normally, all temporaries are freed after the execution of the statement | |
350 | in which they were created. However, if we are inside a ({...}) grouping, | |
351 | the result may be in a temporary and hence must be preserved. If the | |
352 | result could be in a temporary, we preserve it if we can determine which | |
353 | one it is in. If we cannot determine which temporary may contain the | |
354 | result, all temporaries are preserved. A temporary is preserved by | |
355 | pretending it was allocated at the previous nesting level. | |
356 | ||
357 | Automatic variables are also assigned temporary slots, at the nesting | |
358 | level where they are defined. They are marked as "kept" so that | |
359 | free_temp_slots will not free them. */ | |
360 | ||
361 | struct temp_slot | |
362 | { | |
363 | /* Points to next temporary slot. */ | |
364 | struct temp_slot *next; | |
365 | /* The rtx used to reference the slot. */ | |
366 | rtx slot; | |
e5e76139 RK |
367 | /* The rtx used to represent the address if not the address of the |
368 | slot above. May be an EXPR_LIST if multiple addresses exist. */ | |
369 | rtx address; | |
6f086dfc RS |
370 | /* The size, in units, of the slot. */ |
371 | int size; | |
e7a84011 RK |
372 | /* The value of `sequence_rtl_expr' when this temporary is allocated. */ |
373 | tree rtl_expr; | |
6f086dfc RS |
374 | /* Non-zero if this temporary is currently in use. */ |
375 | char in_use; | |
376 | /* Nesting level at which this slot is being used. */ | |
377 | int level; | |
378 | /* Non-zero if this should survive a call to free_temp_slots. */ | |
379 | int keep; | |
380 | }; | |
381 | ||
382 | /* List of all temporaries allocated, both available and in use. */ | |
383 | ||
384 | struct temp_slot *temp_slots; | |
385 | ||
386 | /* Current nesting level for temporaries. */ | |
387 | ||
388 | int temp_slot_level; | |
389 | \f | |
c20bf1f3 JB |
390 | /* The FUNCTION_DECL node for the current function. */ |
391 | static tree this_function_decl; | |
392 | ||
393 | /* Callinfo pointer for the current function. */ | |
394 | static rtx this_function_callinfo; | |
395 | ||
396 | /* The label in the bytecode file of this function's actual bytecode. | |
397 | Not an rtx. */ | |
398 | static char *this_function_bytecode; | |
399 | ||
400 | /* The call description vector for the current function. */ | |
401 | static rtx this_function_calldesc; | |
402 | ||
403 | /* Size of the local variables allocated for the current function. */ | |
404 | int local_vars_size; | |
405 | ||
406 | /* Current depth of the bytecode evaluation stack. */ | |
407 | int stack_depth; | |
408 | ||
409 | /* Maximum depth of the evaluation stack in this function. */ | |
410 | int max_stack_depth; | |
411 | ||
412 | /* Current depth in statement expressions. */ | |
413 | static int stmt_expr_depth; | |
414 | \f | |
6f086dfc RS |
415 | /* Pointer to chain of `struct function' for containing functions. */ |
416 | struct function *outer_function_chain; | |
417 | ||
418 | /* Given a function decl for a containing function, | |
419 | return the `struct function' for it. */ | |
420 | ||
421 | struct function * | |
422 | find_function_data (decl) | |
423 | tree decl; | |
424 | { | |
425 | struct function *p; | |
426 | for (p = outer_function_chain; p; p = p->next) | |
427 | if (p->decl == decl) | |
428 | return p; | |
429 | abort (); | |
430 | } | |
431 | ||
432 | /* Save the current context for compilation of a nested function. | |
433 | This is called from language-specific code. | |
434 | The caller is responsible for saving any language-specific status, | |
6dc42e49 | 435 | since this function knows only about language-independent variables. */ |
6f086dfc RS |
436 | |
437 | void | |
438 | push_function_context () | |
439 | { | |
440 | struct function *p = (struct function *) xmalloc (sizeof (struct function)); | |
441 | ||
442 | p->next = outer_function_chain; | |
443 | outer_function_chain = p; | |
444 | ||
445 | p->name = current_function_name; | |
446 | p->decl = current_function_decl; | |
447 | p->pops_args = current_function_pops_args; | |
448 | p->returns_struct = current_function_returns_struct; | |
449 | p->returns_pcc_struct = current_function_returns_pcc_struct; | |
450 | p->needs_context = current_function_needs_context; | |
451 | p->calls_setjmp = current_function_calls_setjmp; | |
452 | p->calls_longjmp = current_function_calls_longjmp; | |
453 | p->calls_alloca = current_function_calls_alloca; | |
454 | p->has_nonlocal_label = current_function_has_nonlocal_label; | |
8634413a | 455 | p->has_nonlocal_goto = current_function_has_nonlocal_goto; |
6f086dfc RS |
456 | p->args_size = current_function_args_size; |
457 | p->pretend_args_size = current_function_pretend_args_size; | |
458 | p->arg_offset_rtx = current_function_arg_offset_rtx; | |
459 | p->uses_const_pool = current_function_uses_const_pool; | |
460 | p->uses_pic_offset_table = current_function_uses_pic_offset_table; | |
461 | p->internal_arg_pointer = current_function_internal_arg_pointer; | |
462 | p->max_parm_reg = max_parm_reg; | |
463 | p->parm_reg_stack_loc = parm_reg_stack_loc; | |
464 | p->outgoing_args_size = current_function_outgoing_args_size; | |
465 | p->return_rtx = current_function_return_rtx; | |
466 | p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot; | |
467 | p->nonlocal_goto_stack_level = nonlocal_goto_stack_level; | |
468 | p->nonlocal_labels = nonlocal_labels; | |
469 | p->cleanup_label = cleanup_label; | |
470 | p->return_label = return_label; | |
471 | p->save_expr_regs = save_expr_regs; | |
472 | p->stack_slot_list = stack_slot_list; | |
473 | p->parm_birth_insn = parm_birth_insn; | |
474 | p->frame_offset = frame_offset; | |
475 | p->tail_recursion_label = tail_recursion_label; | |
476 | p->tail_recursion_reentry = tail_recursion_reentry; | |
477 | p->arg_pointer_save_area = arg_pointer_save_area; | |
478 | p->rtl_expr_chain = rtl_expr_chain; | |
479 | p->last_parm_insn = last_parm_insn; | |
480 | p->context_display = context_display; | |
481 | p->trampoline_list = trampoline_list; | |
482 | p->function_call_count = function_call_count; | |
483 | p->temp_slots = temp_slots; | |
484 | p->temp_slot_level = temp_slot_level; | |
485 | p->fixup_var_refs_queue = 0; | |
f979c996 | 486 | p->epilogue_delay_list = current_function_epilogue_delay_list; |
6f086dfc RS |
487 | |
488 | save_tree_status (p); | |
489 | save_storage_status (p); | |
490 | save_emit_status (p); | |
491 | init_emit (); | |
492 | save_expr_status (p); | |
493 | save_stmt_status (p); | |
a506307a | 494 | save_varasm_status (p); |
46766466 RS |
495 | |
496 | if (save_machine_status) | |
497 | (*save_machine_status) (p); | |
6f086dfc RS |
498 | } |
499 | ||
500 | /* Restore the last saved context, at the end of a nested function. | |
501 | This function is called from language-specific code. */ | |
502 | ||
503 | void | |
504 | pop_function_context () | |
505 | { | |
506 | struct function *p = outer_function_chain; | |
507 | ||
508 | outer_function_chain = p->next; | |
509 | ||
510 | current_function_name = p->name; | |
511 | current_function_decl = p->decl; | |
512 | current_function_pops_args = p->pops_args; | |
513 | current_function_returns_struct = p->returns_struct; | |
514 | current_function_returns_pcc_struct = p->returns_pcc_struct; | |
515 | current_function_needs_context = p->needs_context; | |
516 | current_function_calls_setjmp = p->calls_setjmp; | |
517 | current_function_calls_longjmp = p->calls_longjmp; | |
518 | current_function_calls_alloca = p->calls_alloca; | |
519 | current_function_has_nonlocal_label = p->has_nonlocal_label; | |
8634413a | 520 | current_function_has_nonlocal_goto = p->has_nonlocal_goto; |
6f086dfc RS |
521 | current_function_contains_functions = 1; |
522 | current_function_args_size = p->args_size; | |
523 | current_function_pretend_args_size = p->pretend_args_size; | |
524 | current_function_arg_offset_rtx = p->arg_offset_rtx; | |
525 | current_function_uses_const_pool = p->uses_const_pool; | |
526 | current_function_uses_pic_offset_table = p->uses_pic_offset_table; | |
527 | current_function_internal_arg_pointer = p->internal_arg_pointer; | |
528 | max_parm_reg = p->max_parm_reg; | |
529 | parm_reg_stack_loc = p->parm_reg_stack_loc; | |
530 | current_function_outgoing_args_size = p->outgoing_args_size; | |
531 | current_function_return_rtx = p->return_rtx; | |
532 | nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot; | |
533 | nonlocal_goto_stack_level = p->nonlocal_goto_stack_level; | |
534 | nonlocal_labels = p->nonlocal_labels; | |
535 | cleanup_label = p->cleanup_label; | |
536 | return_label = p->return_label; | |
537 | save_expr_regs = p->save_expr_regs; | |
538 | stack_slot_list = p->stack_slot_list; | |
539 | parm_birth_insn = p->parm_birth_insn; | |
540 | frame_offset = p->frame_offset; | |
541 | tail_recursion_label = p->tail_recursion_label; | |
542 | tail_recursion_reentry = p->tail_recursion_reentry; | |
543 | arg_pointer_save_area = p->arg_pointer_save_area; | |
544 | rtl_expr_chain = p->rtl_expr_chain; | |
545 | last_parm_insn = p->last_parm_insn; | |
546 | context_display = p->context_display; | |
547 | trampoline_list = p->trampoline_list; | |
548 | function_call_count = p->function_call_count; | |
549 | temp_slots = p->temp_slots; | |
550 | temp_slot_level = p->temp_slot_level; | |
f979c996 | 551 | current_function_epilogue_delay_list = p->epilogue_delay_list; |
6f086dfc RS |
552 | |
553 | restore_tree_status (p); | |
554 | restore_storage_status (p); | |
555 | restore_expr_status (p); | |
556 | restore_emit_status (p); | |
557 | restore_stmt_status (p); | |
a506307a | 558 | restore_varasm_status (p); |
6f086dfc | 559 | |
46766466 RS |
560 | if (restore_machine_status) |
561 | (*restore_machine_status) (p); | |
562 | ||
6f086dfc RS |
563 | /* Finish doing put_var_into_stack for any of our variables |
564 | which became addressable during the nested function. */ | |
565 | { | |
566 | struct var_refs_queue *queue = p->fixup_var_refs_queue; | |
567 | for (; queue; queue = queue->next) | |
00d8a4c1 | 568 | fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp); |
6f086dfc RS |
569 | } |
570 | ||
571 | free (p); | |
572 | ||
573 | /* Reset variables that have known state during rtx generation. */ | |
574 | rtx_equal_function_value_matters = 1; | |
575 | virtuals_instantiated = 0; | |
576 | } | |
577 | \f | |
578 | /* Allocate fixed slots in the stack frame of the current function. */ | |
579 | ||
580 | /* Return size needed for stack frame based on slots so far allocated. | |
581 | This size counts from zero. It is not rounded to STACK_BOUNDARY; | |
582 | the caller may have to do that. */ | |
583 | ||
584 | int | |
585 | get_frame_size () | |
586 | { | |
587 | #ifdef FRAME_GROWS_DOWNWARD | |
588 | return -frame_offset; | |
589 | #else | |
590 | return frame_offset; | |
591 | #endif | |
592 | } | |
593 | ||
594 | /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it | |
595 | with machine mode MODE. | |
596 | ||
597 | ALIGN controls the amount of alignment for the address of the slot: | |
598 | 0 means according to MODE, | |
599 | -1 means use BIGGEST_ALIGNMENT and round size to multiple of that, | |
600 | positive specifies alignment boundary in bits. | |
601 | ||
602 | We do not round to stack_boundary here. */ | |
603 | ||
604 | rtx | |
605 | assign_stack_local (mode, size, align) | |
606 | enum machine_mode mode; | |
607 | int size; | |
608 | int align; | |
609 | { | |
610 | register rtx x, addr; | |
611 | int bigend_correction = 0; | |
612 | int alignment; | |
613 | ||
614 | if (align == 0) | |
615 | { | |
616 | alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
617 | if (mode == BLKmode) | |
618 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
619 | } | |
620 | else if (align == -1) | |
621 | { | |
622 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
623 | size = CEIL_ROUND (size, alignment); | |
624 | } | |
625 | else | |
626 | alignment = align / BITS_PER_UNIT; | |
627 | ||
6f086dfc RS |
628 | /* Round frame offset to that alignment. |
629 | We must be careful here, since FRAME_OFFSET might be negative and | |
630 | division with a negative dividend isn't as well defined as we might | |
631 | like. So we instead assume that ALIGNMENT is a power of two and | |
632 | use logical operations which are unambiguous. */ | |
633 | #ifdef FRAME_GROWS_DOWNWARD | |
634 | frame_offset = FLOOR_ROUND (frame_offset, alignment); | |
635 | #else | |
636 | frame_offset = CEIL_ROUND (frame_offset, alignment); | |
637 | #endif | |
638 | ||
639 | /* On a big-endian machine, if we are allocating more space than we will use, | |
640 | use the least significant bytes of those that are allocated. */ | |
641 | #if BYTES_BIG_ENDIAN | |
642 | if (mode != BLKmode) | |
643 | bigend_correction = size - GET_MODE_SIZE (mode); | |
644 | #endif | |
645 | ||
646 | #ifdef FRAME_GROWS_DOWNWARD | |
647 | frame_offset -= size; | |
648 | #endif | |
649 | ||
650 | /* If we have already instantiated virtual registers, return the actual | |
651 | address relative to the frame pointer. */ | |
652 | if (virtuals_instantiated) | |
653 | addr = plus_constant (frame_pointer_rtx, | |
654 | (frame_offset + bigend_correction | |
655 | + STARTING_FRAME_OFFSET)); | |
656 | else | |
657 | addr = plus_constant (virtual_stack_vars_rtx, | |
658 | frame_offset + bigend_correction); | |
659 | ||
660 | #ifndef FRAME_GROWS_DOWNWARD | |
661 | frame_offset += size; | |
662 | #endif | |
663 | ||
664 | x = gen_rtx (MEM, mode, addr); | |
665 | ||
666 | stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list); | |
667 | ||
668 | return x; | |
669 | } | |
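As a usage sketch, the three ALIGN conventions documented above would look like this at a call site elsewhere in the compiler (SImode, BLKmode, QImode, GET_MODE_SIZE and BITS_PER_UNIT are standard GCC names; the surrounding context is assumed):

```c
/* Sketch of calls, not code from this file.  */
rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
                            /* 0: alignment implied by SImode */
rtx b = assign_stack_local (BLKmode, 24, -1);
                            /* -1: BIGGEST_ALIGNMENT, and the size 24
                               is rounded up to a multiple of it */
rtx c = assign_stack_local (QImode, 1, 4 * BITS_PER_UNIT);
                            /* positive: explicit boundary in bits,
                               here 4 bytes */
```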
670 | ||
671 | /* Assign a stack slot in a containing function. | |
672 | First three arguments are the same as in the preceding function. | |
673 | The last argument specifies the function to allocate in. */ | |
674 | ||
675 | rtx | |
676 | assign_outer_stack_local (mode, size, align, function) | |
677 | enum machine_mode mode; | |
678 | int size; | |
679 | int align; | |
680 | struct function *function; | |
681 | { | |
682 | register rtx x, addr; | |
683 | int bigend_correction = 0; | |
684 | int alignment; | |
685 | ||
686 | /* Allocate in the memory associated with the function in whose frame | |
687 | we are assigning. */ | |
688 | push_obstacks (function->function_obstack, | |
689 | function->function_maybepermanent_obstack); | |
690 | ||
691 | if (align == 0) | |
692 | { | |
693 | alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
694 | if (mode == BLKmode) | |
695 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
696 | } | |
697 | else if (align == -1) | |
698 | { | |
699 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
700 | size = CEIL_ROUND (size, alignment); | |
701 | } | |
702 | else | |
703 | alignment = align / BITS_PER_UNIT; | |
704 | ||
6f086dfc RS |
705 | /* Round frame offset to that alignment. */ |
706 | #ifdef FRAME_GROWS_DOWNWARD | |
2af69b62 | 707 | function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment); |
6f086dfc | 708 | #else |
2af69b62 | 709 | function->frame_offset = CEIL_ROUND (function->frame_offset, alignment); |
6f086dfc RS |
710 | #endif |
711 | ||
712 | /* On a big-endian machine, if we are allocating more space than we will use, | |
713 | use the least significant bytes of those that are allocated. */ | |
714 | #if BYTES_BIG_ENDIAN | |
715 | if (mode != BLKmode) | |
716 | bigend_correction = size - GET_MODE_SIZE (mode); | |
717 | #endif | |
718 | ||
719 | #ifdef FRAME_GROWS_DOWNWARD | |
720 | function->frame_offset -= size; | |
721 | #endif | |
722 | addr = plus_constant (virtual_stack_vars_rtx, | |
723 | function->frame_offset + bigend_correction); | |
724 | #ifndef FRAME_GROWS_DOWNWARD | |
725 | function->frame_offset += size; | |
726 | #endif | |
727 | ||
728 | x = gen_rtx (MEM, mode, addr); | |
729 | ||
730 | function->stack_slot_list | |
731 | = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list); | |
732 | ||
733 | pop_obstacks (); | |
734 | ||
735 | return x; | |
736 | } | |
737 | \f | |
738 | /* Allocate a temporary stack slot and record it for possible later | |
739 | reuse. | |
740 | ||
741 | MODE is the machine mode to be given to the returned rtx. | |
742 | ||
743 | SIZE is the size in units of the space required. We do no rounding here | |
744 | since assign_stack_local will do any required rounding. | |
745 | ||
746 | KEEP is non-zero if this slot is to be retained after a call to | |
747 | free_temp_slots. Automatic variables for a block are allocated with this | |
748 | flag. */ | |
749 | ||
750 | rtx | |
751 | assign_stack_temp (mode, size, keep) | |
752 | enum machine_mode mode; | |
753 | int size; | |
754 | int keep; | |
755 | { | |
756 | struct temp_slot *p, *best_p = 0; | |
757 | ||
758 | /* First try to find an available, already-allocated temporary that is the | |
759 | exact size we require. */ | |
760 | for (p = temp_slots; p; p = p->next) | |
761 | if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use) | |
762 | break; | |
763 | ||
764 | /* If we didn't find one, try one that is larger than what we want. We | |
765 | find the smallest such. */ | |
766 | if (p == 0) | |
767 | for (p = temp_slots; p; p = p->next) | |
768 | if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use | |
769 | && (best_p == 0 || best_p->size > p->size)) | |
770 | best_p = p; | |
771 | ||
772 | /* Make our best, if any, the one to use. */ | |
773 | if (best_p) | |
a45035b6 JW |
774 | { |
775 | /* If there are enough aligned bytes left over, make them into a new | |
776 | temp_slot so that the extra bytes don't get wasted. Do this only | |
777 | for BLKmode slots, so that we can be sure of the alignment. */ | |
778 | if (GET_MODE (best_p->slot) == BLKmode) | |
779 | { | |
780 | int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
781 | int rounded_size = CEIL_ROUND (size, alignment); | |
782 | ||
783 | if (best_p->size - rounded_size >= alignment) | |
784 | { | |
785 | p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); | |
786 | p->in_use = 0; | |
787 | p->size = best_p->size - rounded_size; | |
788 | p->slot = gen_rtx (MEM, BLKmode, | |
789 | plus_constant (XEXP (best_p->slot, 0), | |
790 | rounded_size)); | |
e5e76139 | 791 | p->address = 0; |
a45035b6 JW |
792 | p->next = temp_slots; |
793 | temp_slots = p; | |
794 | ||
795 | stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot, | |
796 | stack_slot_list); | |
797 | ||
798 | best_p->size = rounded_size; | |
799 | } | |
800 | } | |
801 | ||
802 | p = best_p; | |
803 | } | |
804 | ||
6f086dfc RS |
805 | /* If we still didn't find one, make a new temporary. */ |
806 | if (p == 0) | |
807 | { | |
808 | p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); | |
809 | p->size = size; | |
810 | /* If the temp slot mode doesn't indicate the alignment, | |
811 | use the largest possible, so no one will be disappointed. */ | |
e5e76139 RK |
812 | p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0); |
813 | p->address = 0; | |
6f086dfc RS |
814 | p->next = temp_slots; |
815 | temp_slots = p; | |
816 | } | |
817 | ||
818 | p->in_use = 1; | |
e7a84011 | 819 | p->rtl_expr = sequence_rtl_expr; |
6f086dfc RS |
820 | p->level = temp_slot_level; |
821 | p->keep = keep; | |
822 | return p->slot; | |
823 | } | |
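A sketch of the reuse this buys (statement-level temporaries recycled across free_temp_slots; compiler context assumed):

```c
/* Sketch: two statements expanded in sequence.  */
rtx t1 = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
/* ... emit RTL for statement 1 using t1 ... */
free_temp_slots ();   /* t1's slot is now marked not in_use */

rtx t2 = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
/* t2 is normally the same slot as t1, found by the exact-size
   scan at the top of assign_stack_temp.  */
```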
a45035b6 JW |
824 | |
825 | /* Combine temporary stack slots which are adjacent on the stack. | |
826 | ||
827 | This allows for better use of already allocated stack space. This is only | |
828 | done for BLKmode slots because we can be sure that we won't have alignment | |
829 | problems in this case. */ | |
830 | ||
831 | void | |
832 | combine_temp_slots () | |
833 | { | |
834 | struct temp_slot *p, *q; | |
835 | struct temp_slot *prev_p, *prev_q; | |
e9b7093a RS |
836 | /* Determine where to free back to after this function. */ |
837 | rtx free_pointer = rtx_alloc (CONST_INT); | |
a45035b6 | 838 | |
e9b7093a RS |
839 | for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots) |
840 | { | |
841 | int delete_p = 0; | |
842 | if (! p->in_use && GET_MODE (p->slot) == BLKmode) | |
843 | for (q = p->next, prev_q = p; q; q = prev_q->next) | |
a45035b6 | 844 | { |
e9b7093a RS |
845 | int delete_q = 0; |
846 | if (! q->in_use && GET_MODE (q->slot) == BLKmode) | |
a45035b6 | 847 | { |
e9b7093a RS |
848 | if (rtx_equal_p (plus_constant (XEXP (p->slot, 0), p->size), |
849 | XEXP (q->slot, 0))) | |
850 | { | |
851 | /* Q comes after P; combine Q into P. */ | |
852 | p->size += q->size; | |
853 | delete_q = 1; | |
854 | } | |
855 | else if (rtx_equal_p (plus_constant (XEXP (q->slot, 0), q->size), | |
856 | XEXP (p->slot, 0))) | |
857 | { | |
858 | /* P comes after Q; combine P into Q. */ | |
859 | q->size += p->size; | |
860 | delete_p = 1; | |
861 | break; | |
862 | } | |
a45035b6 | 863 | } |
e9b7093a RS |
864 | /* Either delete Q or advance past it. */ |
865 | if (delete_q) | |
866 | prev_q->next = q->next; | |
867 | else | |
868 | prev_q = q; | |
a45035b6 | 869 | } |
e9b7093a RS |
870 | /* Either delete P or advance past it. */ |
871 | if (delete_p) | |
872 | { | |
873 | if (prev_p) | |
874 | prev_p->next = p->next; | |
875 | else | |
876 | temp_slots = p->next; | |
877 | } | |
878 | else | |
879 | prev_p = p; | |
880 | } | |
881 | ||
882 | /* Free all the RTL made by plus_constant. */ | |
883 | rtx_free (free_pointer); | |
a45035b6 | 884 | } |
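The adjacency test is easiest to see with concrete offsets. Below is a self-contained model using plain integer offsets in place of rtx addresses; it illustrates the merge rule, and is not GCC's actual code:

```c
#include <stdio.h>

/* Model: a free BLKmode slot ending where another begins is absorbed. */
struct slot { int offset, size, in_use; };

int
main (void)
{
  struct slot p = { -32, 16, 0 };   /* free 16-byte slot at fp-32 */
  struct slot q = { -16, 16, 0 };   /* free 16-byte slot at fp-16 */

  if (!p.in_use && !q.in_use && p.offset + p.size == q.offset)
    {
      p.size += q.size;             /* Q comes after P; combine Q into P */
      printf ("merged: offset %d size %d\n", p.offset, p.size);
    }
  return 0;
}
```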
6f086dfc | 885 | \f |
e5e76139 RK |
886 | /* Find the temp slot corresponding to the object at address X. */ |
887 | ||
888 | static struct temp_slot * | |
889 | find_temp_slot_from_address (x) | |
890 | rtx x; | |
891 | { | |
892 | struct temp_slot *p; | |
893 | rtx next; | |
894 | ||
895 | for (p = temp_slots; p; p = p->next) | |
896 | { | |
897 | if (! p->in_use) | |
898 | continue; | |
899 | else if (XEXP (p->slot, 0) == x | |
900 | || p->address == x) | |
901 | return p; | |
902 | ||
903 | else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST) | |
904 | for (next = p->address; next; next = XEXP (next, 1)) | |
905 | if (XEXP (next, 0) == x) | |
906 | return p; | |
907 | } | |
908 | ||
909 | return 0; | |
910 | } | |
911 | ||
912 | /* Indicate that NEW is an alternate way of referring to the temp slot | |
913 | that was previously known by OLD. */ | |
914 | ||
915 | void | |
916 | update_temp_slot_address (old, new) | |
917 | rtx old, new; | |
918 | { | |
919 | struct temp_slot *p = find_temp_slot_from_address (old); | |
920 | ||
921 | /* If none, return. Else add NEW as an alias. */ | |
922 | if (p == 0) | |
923 | return; | |
924 | else if (p->address == 0) | |
925 | p->address = new; | |
926 | else | |
927 | { | |
928 | if (GET_CODE (p->address) != EXPR_LIST) | |
929 | p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX); | |
930 | ||
931 | p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address); | |
932 | } | |
933 | } | |
934 | ||
6f086dfc RS |
935 | /* If X could be a reference to a temporary slot, mark that slot as belonging |
936 | to the level one higher. If X matched one of our slots, just mark that | |
937 | one. Otherwise, we can't easily predict which it is, so upgrade all of | |
938 | them. Kept slots need not be touched. | |
939 | ||
940 | This is called when an ({...}) construct occurs and a statement | |
941 | returns a value in memory. */ | |
942 | ||
943 | void | |
944 | preserve_temp_slots (x) | |
945 | rtx x; | |
946 | { | |
947 | struct temp_slot *p; | |
948 | ||
e3a77161 RK |
949 | if (x == 0) |
950 | return; | |
951 | ||
952 | /* If X is a register that is being used as a pointer, see if we have | |
953 | a temporary slot we know it points to. To be consistent with | |
954 | the code below, we really should preserve all non-kept slots | |
955 | if we can't find a match, but that seems to be much too costly. */ | |
956 | if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)) | |
957 | && (p = find_temp_slot_from_address (x)) != 0) | |
958 | { | |
959 | p->level--; | |
960 | return; | |
961 | } | |
962 | ||
6f086dfc RS |
963 | /* If X is not in memory or is at a constant address, it cannot be in |
964 | a temporary slot. */ | |
e3a77161 | 965 | if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))) |
6f086dfc RS |
966 | return; |
967 | ||
968 | /* First see if we can find a match. */ | |
e5e76139 RK |
969 | p = find_temp_slot_from_address (XEXP (x, 0)); |
970 | if (p != 0) | |
971 | { | |
972 | p->level--; | |
973 | return; | |
974 | } | |
6f086dfc RS |
975 | |
976 | /* Otherwise, preserve all non-kept slots at this level. */ | |
977 | for (p = temp_slots; p; p = p->next) | |
978 | if (p->in_use && p->level == temp_slot_level && ! p->keep) | |
979 | p->level--; | |
980 | } | |
981 | ||
422c8f63 RK |
982 | /* X is the result of an RTL_EXPR. If it is a temporary slot associated |
983 | with that RTL_EXPR, promote it into a temporary slot at the present | |
984 | level so it will not be freed when we free slots made in the | |
985 | RTL_EXPR. */ | |
986 | ||
987 | void | |
988 | preserve_rtl_expr_result (x) | |
989 | rtx x; | |
990 | { | |
991 | struct temp_slot *p; | |
992 | ||
993 | /* If X is not in memory or is at a constant address, it cannot be in | |
994 | a temporary slot. */ | |
995 | if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))) | |
996 | return; | |
997 | ||
998 | /* If we can find a match, move it to our level. */ | |
999 | for (p = temp_slots; p; p = p->next) | |
1000 | if (p->in_use && rtx_equal_p (x, p->slot)) | |
1001 | { | |
1002 | p->level = temp_slot_level; | |
1003 | p->rtl_expr = 0; | |
1004 | return; | |
1005 | } | |
1006 | ||
1007 | return; | |
1008 | } | |
1009 | ||
6f086dfc | 1010 | /* Free all temporaries used so far. This is normally called at the end |
e7a84011 RK |
1011 | of generating code for a statement. Don't free any temporaries |
1012 | currently in use for an RTL_EXPR that hasn't yet been emitted. | |
1013 | We could eventually do better than this since it can be reused while | |
1014 | generating the same RTL_EXPR, but this is complex and probably not | |
1015 | worthwhile. */ | |
6f086dfc RS |
1016 | |
1017 | void | |
1018 | free_temp_slots () | |
1019 | { | |
1020 | struct temp_slot *p; | |
1021 | ||
1022 | for (p = temp_slots; p; p = p->next) | |
e7a84011 RK |
1023 | if (p->in_use && p->level == temp_slot_level && ! p->keep |
1024 | && p->rtl_expr == 0) | |
1025 | p->in_use = 0; | |
1026 | ||
1027 | combine_temp_slots (); | |
1028 | } | |
1029 | ||
1030 | /* Free all temporary slots used in T, an RTL_EXPR node. */ | |
1031 | ||
1032 | void | |
1033 | free_temps_for_rtl_expr (t) | |
1034 | tree t; | |
1035 | { | |
1036 | struct temp_slot *p; | |
1037 | ||
1038 | for (p = temp_slots; p; p = p->next) | |
1039 | if (p->rtl_expr == t) | |
6f086dfc | 1040 | p->in_use = 0; |
a45035b6 JW |
1041 | |
1042 | combine_temp_slots (); | |
6f086dfc RS |
1043 | } |
1044 | ||
1045 | /* Push deeper into the nesting level for stack temporaries. */ | |
1046 | ||
1047 | void | |
1048 | push_temp_slots () | |
1049 | { | |
6f086dfc RS |
1050 | temp_slot_level++; |
1051 | } | |
1052 | ||
1053 | /* Pop a temporary nesting level. All slots in use in the current level | |
1054 | are freed. */ | |
1055 | ||
1056 | void | |
1057 | pop_temp_slots () | |
1058 | { | |
1059 | struct temp_slot *p; | |
1060 | ||
6f086dfc | 1061 | for (p = temp_slots; p; p = p->next) |
e7a84011 | 1062 | if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0) |
6f086dfc RS |
1063 | p->in_use = 0; |
1064 | ||
a45035b6 JW |
1065 | combine_temp_slots (); |
1066 | ||
6f086dfc RS |
1067 | temp_slot_level--; |
1068 | } | |
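Taken together, the expected nesting discipline looks like this (a sketch of caller code; `result_rtx' is a hypothetical name for the rtx holding the value to keep):

```c
/* Sketch: bracketing a nested construct with a temporary level.  */
push_temp_slots ();                 /* enter a new level */
rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
/* ... expand the sub-expression into tmp ... */
preserve_temp_slots (result_rtx);   /* keep the slot holding the result
                                       alive one level longer */
pop_temp_slots ();                  /* frees everything else made here */
```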
1069 | \f | |
1070 | /* Retroactively move an auto variable from a register to a stack slot. | |
1071 | This is done when an address-reference to the variable is seen. */ | |
1072 | ||
1073 | void | |
1074 | put_var_into_stack (decl) | |
1075 | tree decl; | |
1076 | { | |
1077 | register rtx reg; | |
00d8a4c1 | 1078 | enum machine_mode promoted_mode, decl_mode; |
6f086dfc | 1079 | struct function *function = 0; |
c20bf1f3 JB |
1080 | tree context; |
1081 | ||
1082 | if (output_bytecode) | |
1083 | return; | |
1084 | ||
1085 | context = decl_function_context (decl); | |
6f086dfc | 1086 | |
00d8a4c1 | 1087 | /* Get the current rtl used for this object and its original mode. */ |
6f086dfc | 1088 | reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl); |
2baccce2 RS |
1089 | |
1090 | /* No need to do anything if decl has no rtx yet | |
1091 | since in that case caller is setting TREE_ADDRESSABLE | |
1092 | and a stack slot will be assigned when the rtl is made. */ | |
1093 | if (reg == 0) | |
1094 | return; | |
00d8a4c1 RK |
1095 | |
1096 | /* Get the declared mode for this object. */ | |
1097 | decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl)) | |
1098 | : DECL_MODE (decl)); | |
2baccce2 RS |
1099 | /* Get the mode it's actually stored in. */ |
1100 | promoted_mode = GET_MODE (reg); | |
6f086dfc RS |
1101 | |
1102 | /* If this variable comes from an outer function, | |
1103 | find that function's saved context. */ | |
1104 | if (context != current_function_decl) | |
1105 | for (function = outer_function_chain; function; function = function->next) | |
1106 | if (function->decl == context) | |
1107 | break; | |
1108 | ||
6f086dfc RS |
1109 | /* If this is a variable-size object with a pseudo to address it, |
1110 | put that pseudo into the stack, if the var is nonlocal. */ | |
a82ad570 | 1111 | if (DECL_NONLOCAL (decl) |
6f086dfc RS |
1112 | && GET_CODE (reg) == MEM |
1113 | && GET_CODE (XEXP (reg, 0)) == REG | |
1114 | && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER) | |
4cdb3e78 RS |
1115 | { |
1116 | reg = XEXP (reg, 0); | |
1117 | decl_mode = promoted_mode = GET_MODE (reg); | |
1118 | } | |
e15762df | 1119 | |
293e3de4 RS |
1120 | /* Now we should have a value that resides in one or more pseudo regs. */ |
1121 | ||
1122 | if (GET_CODE (reg) == REG) | |
1123 | put_reg_into_stack (function, reg, TREE_TYPE (decl), | |
1124 | promoted_mode, decl_mode); | |
1125 | else if (GET_CODE (reg) == CONCAT) | |
1126 | { | |
1127 | /* A CONCAT contains two pseudos; put them both in the stack. | |
1128 | We do it so they end up consecutive. */ | |
1129 | enum machine_mode part_mode = GET_MODE (XEXP (reg, 0)); | |
1130 | tree part_type = TREE_TYPE (TREE_TYPE (decl)); | |
1131 | #ifdef STACK_GROWS_DOWNWARD | |
1132 | /* Since part 0 should have a lower address, do it second. */ | |
1133 | put_reg_into_stack (function, XEXP (reg, 1), | |
1134 | part_type, part_mode, part_mode); | |
1135 | put_reg_into_stack (function, XEXP (reg, 0), | |
1136 | part_type, part_mode, part_mode); | |
1137 | #else | |
1138 | put_reg_into_stack (function, XEXP (reg, 0), | |
1139 | part_type, part_mode, part_mode); | |
1140 | put_reg_into_stack (function, XEXP (reg, 1), | |
1141 | part_type, part_mode, part_mode); | |
1142 | #endif | |
1143 | ||
1144 | /* Change the CONCAT into a combined MEM for both parts. */ | |
1145 | PUT_CODE (reg, MEM); | |
1146 | /* The two parts are in memory order already. | |
1147 | Use the lower part's address as ours. */ | |
1148 | XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0); | |
1149 | /* Prevent sharing of rtl that might lose. */ | |
1150 | if (GET_CODE (XEXP (reg, 0)) == PLUS) | |
1151 | XEXP (reg, 0) = copy_rtx (XEXP (reg, 0)); | |
1152 | } | |
1153 | } | |
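The classic source-level trigger for this retroactive move is an address-taking that is only discovered after the variable was given a pseudo-register, e.g.:

```c
/* User code illustrating the "belated" case described above: once &n
   is seen, n's DECL_RTL must become a stack slot and the RTL already
   emitted for n has to be fixed up.  */
int
f (void)
{
  int n = 42;
  int *p = &n;      /* address taken: n can no longer live in a reg */
  return *p;
}
```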
1154 | ||
1155 | /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG | |
1156 | into the stack frame of FUNCTION (0 means the current function). | |
1157 | DECL_MODE is the machine mode of the user-level data type. | |
1158 | PROMOTED_MODE is the machine mode of the register. */ | |
1159 | ||
1160 | static void | |
1161 | put_reg_into_stack (function, reg, type, promoted_mode, decl_mode) | |
1162 | struct function *function; | |
1163 | rtx reg; | |
1164 | tree type; | |
1165 | enum machine_mode promoted_mode, decl_mode; | |
1166 | { | |
1167 | rtx new = 0; | |
6f086dfc RS |
1168 | |
1169 | if (function) | |
1170 | { | |
1171 | if (REGNO (reg) < function->max_parm_reg) | |
1172 | new = function->parm_reg_stack_loc[REGNO (reg)]; | |
1173 | if (new == 0) | |
e15762df | 1174 | new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), |
6f086dfc RS |
1175 | 0, function); |
1176 | } | |
1177 | else | |
1178 | { | |
1179 | if (REGNO (reg) < max_parm_reg) | |
1180 | new = parm_reg_stack_loc[REGNO (reg)]; | |
1181 | if (new == 0) | |
e15762df | 1182 | new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0); |
6f086dfc RS |
1183 | } |
1184 | ||
1185 | XEXP (reg, 0) = XEXP (new, 0); | |
1186 | /* `volatil' bit means one thing for MEMs, another entirely for REGs. */ | |
1187 | REG_USERVAR_P (reg) = 0; | |
1188 | PUT_CODE (reg, MEM); | |
00d8a4c1 | 1189 | PUT_MODE (reg, decl_mode); |
6f086dfc RS |
1190 | |
1191 | /* If this is a memory ref that contains aggregate components, | |
1192 | mark it as such for cse and loop optimize. */ | |
1193 | MEM_IN_STRUCT_P (reg) | |
293e3de4 RS |
1194 | = (TREE_CODE (type) == ARRAY_TYPE |
1195 | || TREE_CODE (type) == RECORD_TYPE | |
1196 | || TREE_CODE (type) == UNION_TYPE | |
1197 | || TREE_CODE (type) == QUAL_UNION_TYPE); | |
6f086dfc RS |
1198 | |
1199 | /* Now make sure that all refs to the variable, previously made | |
1200 | when it was a register, are fixed up to be valid again. */ | |
1201 | if (function) | |
1202 | { | |
1203 | struct var_refs_queue *temp; | |
1204 | ||
1205 | /* Variable is inherited; fix it up when we get back to its function. */ | |
1206 | push_obstacks (function->function_obstack, | |
1207 | function->function_maybepermanent_obstack); | |
4da73fa0 RK |
1208 | |
1209 | /* See comment in restore_tree_status in tree.c for why this needs to be | |
1210 | on saveable obstack. */ | |
6f086dfc | 1211 | temp |
4da73fa0 | 1212 | = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue)); |
6f086dfc | 1213 | temp->modified = reg; |
00d8a4c1 | 1214 | temp->promoted_mode = promoted_mode; |
293e3de4 | 1215 | temp->unsignedp = TREE_UNSIGNED (type); |
6f086dfc RS |
1216 | temp->next = function->fixup_var_refs_queue; |
1217 | function->fixup_var_refs_queue = temp; | |
1218 | pop_obstacks (); | |
1219 | } | |
1220 | else | |
1221 | /* Variable is local; fix it up now. */ | |
293e3de4 | 1222 | fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type)); |
6f086dfc RS |
1223 | } |
1224 | \f | |
1225 | static void | |
00d8a4c1 | 1226 | fixup_var_refs (var, promoted_mode, unsignedp) |
6f086dfc | 1227 | rtx var; |
00d8a4c1 RK |
1228 | enum machine_mode promoted_mode; |
1229 | int unsignedp; | |
6f086dfc RS |
1230 | { |
1231 | tree pending; | |
1232 | rtx first_insn = get_insns (); | |
1233 | struct sequence_stack *stack = sequence_stack; | |
1234 | tree rtl_exps = rtl_expr_chain; | |
1235 | ||
1236 | /* Must scan all insns for stack-refs that exceed the limit. */ | |
00d8a4c1 | 1237 | fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0); |
6f086dfc RS |
1238 | |
1239 | /* Scan all pending sequences too. */ | |
1240 | for (; stack; stack = stack->next) | |
1241 | { | |
1242 | push_to_sequence (stack->first); | |
00d8a4c1 RK |
1243 | fixup_var_refs_insns (var, promoted_mode, unsignedp, |
1244 | stack->first, stack->next != 0); | |
6f086dfc RS |
1245 | /* Update remembered end of sequence |
1246 | in case we added an insn at the end. */ | |
1247 | stack->last = get_last_insn (); | |
1248 | end_sequence (); | |
1249 | } | |
1250 | ||
1251 | /* Scan all waiting RTL_EXPRs too. */ | |
1252 | for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending)) | |
1253 | { | |
1254 | rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending)); | |
1255 | if (seq != const0_rtx && seq != 0) | |
1256 | { | |
1257 | push_to_sequence (seq); | |
00d8a4c1 | 1258 | fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0); |
6f086dfc RS |
1259 | end_sequence (); |
1260 | } | |
1261 | } | |
1262 | } | |
1263 | \f | |
1264 | /* This structure is used by the following two functions to record MEMs or | |
1265 | pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing | |
1266 | VAR as an address. We need to maintain this list in case two operands of | |
1267 | an insn were required to match; in that case we must ensure we use the | |
1268 | same replacement. */ | |
1269 | ||
1270 | struct fixup_replacement | |
1271 | { | |
1272 | rtx old; | |
1273 | rtx new; | |
1274 | struct fixup_replacement *next; | |
1275 | }; | |
1276 | ||
1277 | /* REPLACEMENTS is a pointer to a list of the above structures and X is | |
1278 | some part of an insn. Return a struct fixup_replacement whose OLD | |
1279 | value is equal to X. Allocate a new structure if no such entry exists. */ | |
1280 | ||
1281 | static struct fixup_replacement * | |
2740a678 | 1282 | find_fixup_replacement (replacements, x) |
6f086dfc RS |
1283 | struct fixup_replacement **replacements; |
1284 | rtx x; | |
1285 | { | |
1286 | struct fixup_replacement *p; | |
1287 | ||
1288 | /* See if we have already replaced this. */ | |
1289 | for (p = *replacements; p && p->old != x; p = p->next) | |
1290 | ; | |
1291 | ||
1292 | if (p == 0) | |
1293 | { | |
1294 | p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement)); | |
1295 | p->old = x; | |
1296 | p->new = 0; | |
1297 | p->next = *replacements; | |
1298 | *replacements = p; | |
1299 | } | |
1300 | ||
1301 | return p; | |
1302 | } | |
1303 | ||
1304 | /* Scan the insn-chain starting with INSN for refs to VAR | |
1305 | and fix them up. TOPLEVEL is nonzero if this chain is the | |
1306 | main chain of insns for the current function. */ | |
1307 | ||
1308 | static void | |
00d8a4c1 | 1309 | fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel) |
6f086dfc | 1310 | rtx var; |
00d8a4c1 RK |
1311 | enum machine_mode promoted_mode; |
1312 | int unsignedp; | |
6f086dfc RS |
1313 | rtx insn; |
1314 | int toplevel; | |
1315 | { | |
02a10449 RK |
1316 | rtx call_dest = 0; |
1317 | ||
6f086dfc RS |
1318 | while (insn) |
1319 | { | |
1320 | rtx next = NEXT_INSN (insn); | |
1321 | rtx note; | |
e15762df | 1322 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
6f086dfc | 1323 | { |
63770d6a RK |
1324 | /* If this is a CLOBBER of VAR, delete it. |
1325 | ||
1326 | If it has a REG_LIBCALL note, delete the REG_LIBCALL | |
1327 | and REG_RETVAL notes too. */ | |
1328 | if (GET_CODE (PATTERN (insn)) == CLOBBER | |
1329 | && XEXP (PATTERN (insn), 0) == var) | |
1330 | { | |
1331 | if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0) | |
1332 | /* The REG_LIBCALL note will go away since we are going to | |
1333 | turn INSN into a NOTE, so just delete the | |
1334 | corresponding REG_RETVAL note. */ | |
1335 | remove_note (XEXP (note, 0), | |
1336 | find_reg_note (XEXP (note, 0), REG_RETVAL, | |
1337 | NULL_RTX)); | |
1338 | ||
1339 | /* In unoptimized compilation, we shouldn't call delete_insn | |
1340 | except in jump.c doing warnings. */ | |
1341 | PUT_CODE (insn, NOTE); | |
1342 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1343 | NOTE_SOURCE_FILE (insn) = 0; | |
1344 | } | |
1345 | ||
6f086dfc RS |
1346 | /* The insn to load VAR from a home in the arglist |
1347 | is now a no-op. When we see it, just delete it. */ | |
63770d6a RK |
1348 | else if (toplevel |
1349 | && GET_CODE (PATTERN (insn)) == SET | |
1350 | && SET_DEST (PATTERN (insn)) == var | |
1351 | /* If this represents the result of an insn group, | |
1352 | don't delete the insn. */ | |
1353 | && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0 | |
1354 | && rtx_equal_p (SET_SRC (PATTERN (insn)), var)) | |
6f086dfc | 1355 | { |
b4ff474c RS |
1356 | /* In unoptimized compilation, we shouldn't call delete_insn |
1357 | except in jump.c doing warnings. */ | |
1358 | PUT_CODE (insn, NOTE); | |
1359 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1360 | NOTE_SOURCE_FILE (insn) = 0; | |
6f086dfc RS |
1361 | if (insn == last_parm_insn) |
1362 | last_parm_insn = PREV_INSN (next); | |
1363 | } | |
1364 | else | |
1365 | { | |
02a10449 RK |
1366 | struct fixup_replacement *replacements = 0; |
1367 | rtx next_insn = NEXT_INSN (insn); | |
1368 | ||
1369 | #ifdef SMALL_REGISTER_CLASSES | |
1370 | /* If the insn that copies the results of a CALL_INSN | |
1371 | into a pseudo now references VAR, we have to use an | |
1372 | intermediate pseudo since we want the life of the | |
1373 | return value register to be only a single insn. | |
1374 | ||
1375 | If we don't use an intermediate pseudo, such things as | |
1376 | address computations needed to make the address of VAR valid | |
1377 | (if it is not already) could be placed between the CALL_INSN and INSN. | |
1378 | ||
1379 | To make sure this doesn't happen, we record the destination | |
1380 | of the CALL_INSN and see if the next insn uses both that | |
1381 | and VAR. */ | |
1382 | ||
1383 | if (call_dest != 0 && GET_CODE (insn) == INSN | |
1384 | && reg_mentioned_p (var, PATTERN (insn)) | |
1385 | && reg_mentioned_p (call_dest, PATTERN (insn))) | |
1386 | { | |
1387 | rtx temp = gen_reg_rtx (GET_MODE (call_dest)); | |
1388 | ||
1389 | emit_insn_before (gen_move_insn (temp, call_dest), insn); | |
1390 | ||
1391 | PATTERN (insn) = replace_rtx (PATTERN (insn), | |
1392 | call_dest, temp); | |
1393 | } | |
1394 | ||
1395 | if (GET_CODE (insn) == CALL_INSN | |
1396 | && GET_CODE (PATTERN (insn)) == SET) | |
1397 | call_dest = SET_DEST (PATTERN (insn)); | |
1398 | else if (GET_CODE (insn) == CALL_INSN | |
1399 | && GET_CODE (PATTERN (insn)) == PARALLEL | |
1400 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
1401 | call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); | |
1402 | else | |
1403 | call_dest = 0; | |
1404 | #endif | |
1405 | ||
6f086dfc RS |
1406 | /* See if we have to do anything to INSN now that VAR is in |
1407 | memory. If it needs to be loaded into a pseudo, use a single | |
1408 | pseudo for the entire insn in case there is a MATCH_DUP | |
1409 | between two operands. We pass a pointer to the head of | |
1410 | a list of struct fixup_replacements. If fixup_var_refs_1 | |
1411 | needs to allocate pseudos or replacement MEMs (for SUBREGs), | |
1412 | it will record them in this list. | |
1413 | ||
1414 | If it allocated a pseudo for any replacement, we copy into | |
1415 | it here. */ | |
1416 | ||
00d8a4c1 RK |
1417 | fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn, |
1418 | &replacements); | |
6f086dfc | 1419 | |
77121fee JW |
1420 | /* If this is last_parm_insn, and any instructions were output |
1421 | after it to fix it up, then we must set last_parm_insn to | |
1422 | the last such instruction emitted. */ | |
1423 | if (insn == last_parm_insn) | |
1424 | last_parm_insn = PREV_INSN (next_insn); | |
1425 | ||
6f086dfc RS |
1426 | while (replacements) |
1427 | { | |
1428 | if (GET_CODE (replacements->new) == REG) | |
1429 | { | |
1430 | rtx insert_before; | |
00d8a4c1 | 1431 | rtx seq; |
6f086dfc RS |
1432 | |
1433 | /* OLD might be a (subreg (mem)). */ | |
1434 | if (GET_CODE (replacements->old) == SUBREG) | |
1435 | replacements->old | |
1436 | = fixup_memory_subreg (replacements->old, insn, 0); | |
1437 | else | |
1438 | replacements->old | |
1439 | = fixup_stack_1 (replacements->old, insn); | |
1440 | ||
1441 | /* We cannot separate USE insns from the CALL_INSN | |
1442 | that they belong to. If this is a CALL_INSN, insert | |
b335c2cc | 1443 | the move insn before the USE insns preceding it |
6f086dfc RS |
1444 | instead of immediately before the insn. */ |
1445 | if (GET_CODE (insn) == CALL_INSN) | |
1446 | { | |
1447 | insert_before = insn; | |
1448 | while (GET_CODE (PREV_INSN (insert_before)) == INSN | |
1449 | && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE) | |
1450 | insert_before = PREV_INSN (insert_before); | |
1451 | } | |
1452 | else | |
1453 | insert_before = insn; | |
1454 | ||
00d8a4c1 RK |
1455 | /* If we are changing the mode, do a conversion. |
1456 | This might be wasteful, but combine.c will | |
1457 | eliminate much of the waste. */ | |
1458 | ||
1459 | if (GET_MODE (replacements->new) | |
1460 | != GET_MODE (replacements->old)) | |
1461 | { | |
1462 | start_sequence (); | |
1463 | convert_move (replacements->new, | |
1464 | replacements->old, unsignedp); | |
1465 | seq = gen_sequence (); | |
1466 | end_sequence (); | |
1467 | } | |
1468 | else | |
1469 | seq = gen_move_insn (replacements->new, | |
1470 | replacements->old); | |
1471 | ||
1472 | emit_insn_before (seq, insert_before); | |
6f086dfc RS |
1473 | } |
1474 | ||
1475 | replacements = replacements->next; | |
1476 | } | |
1477 | } | |
1478 | ||
1479 | /* Also fix up any invalid exprs in the REG_NOTES of this insn. | |
1480 | But don't touch other insns referred to by reg-notes; | |
1481 | we will get them elsewhere. */ | |
1482 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
1483 | if (GET_CODE (note) != INSN_LIST) | |
ab6155b7 RK |
1484 | XEXP (note, 0) |
1485 | = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1); | |
6f086dfc RS |
1486 | } |
1487 | insn = next; | |
1488 | } | |
1489 | } | |
1490 | \f | |
00d8a4c1 RK |
1491 | /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE. |
1492 | See if the rtx expression at *LOC in INSN needs to be changed. | |
6f086dfc RS |
1493 | |
1494 | REPLACEMENTS is a pointer to a list head that starts out zero, but may | |
1495 | contain a list of original rtx's and replacements. If we find that we need | |
1496 | to modify this insn by replacing a memory reference with a pseudo or by | |
1497 | making a new MEM to implement a SUBREG, we consult that list to see if | |
1498 | we have already chosen a replacement. If none has already been allocated, | |
1499 | we allocate it and update the list. fixup_var_refs_insns will copy VAR | |
1500 | or the SUBREG, as appropriate, to the pseudo. */ | |
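/* As a concrete sketch (register numbers illustrative): if VAR is
   (mem:SI (plus:SI (reg:SI 101) (const_int -8))) and INSN uses it in two
   operands related by a MATCH_DUP, both occurrences are replaced by the
   same pseudo, say (reg:SI 107), and the caller then emits
   (set (reg:SI 107) VAR) just before INSN.  */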
1501 | ||
1502 | static void | |
00d8a4c1 | 1503 | fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements) |
6f086dfc | 1504 | register rtx var; |
00d8a4c1 | 1505 | enum machine_mode promoted_mode; |
6f086dfc RS |
1506 | register rtx *loc; |
1507 | rtx insn; | |
1508 | struct fixup_replacement **replacements; | |
1509 | { | |
1510 | register int i; | |
1511 | register rtx x = *loc; | |
1512 | RTX_CODE code = GET_CODE (x); | |
1513 | register char *fmt; | |
1514 | register rtx tem, tem1; | |
1515 | struct fixup_replacement *replacement; | |
1516 | ||
1517 | switch (code) | |
1518 | { | |
1519 | case MEM: | |
1520 | if (var == x) | |
1521 | { | |
1522 | /* If we already have a replacement, use it. Otherwise, | |
1523 | try to fix up this address in case it is invalid. */ | |
1524 | ||
2740a678 | 1525 | replacement = find_fixup_replacement (replacements, var); |
6f086dfc RS |
1526 | if (replacement->new) |
1527 | { | |
1528 | *loc = replacement->new; | |
1529 | return; | |
1530 | } | |
1531 | ||
1532 | *loc = replacement->new = x = fixup_stack_1 (x, insn); | |
1533 | ||
00d8a4c1 RK |
1534 | /* Unless we are forcing memory to register or we changed the mode, |
1535 | we can leave things the way they are if the insn is valid. */ | |
6f086dfc RS |
1536 | |
1537 | INSN_CODE (insn) = -1; | |
00d8a4c1 RK |
1538 | if (! flag_force_mem && GET_MODE (x) == promoted_mode |
1539 | && recog_memoized (insn) >= 0) | |
6f086dfc RS |
1540 | return; |
1541 | ||
00d8a4c1 | 1542 | *loc = replacement->new = gen_reg_rtx (promoted_mode); |
6f086dfc RS |
1543 | return; |
1544 | } | |
1545 | ||
1546 | /* If X contains VAR, we need to unshare it here so that we update | |
1547 | each occurrence separately. But all identical MEMs in one insn | |
1548 | must be replaced with the same rtx because of the possibility of | |
1549 | MATCH_DUPs. */ | |
1550 | ||
1551 | if (reg_mentioned_p (var, x)) | |
1552 | { | |
2740a678 | 1553 | replacement = find_fixup_replacement (replacements, x); |
6f086dfc RS |
1554 | if (replacement->new == 0) |
1555 | replacement->new = copy_most_rtx (x, var); | |
1556 | ||
1557 | *loc = x = replacement->new; | |
1558 | } | |
1559 | break; | |
1560 | ||
1561 | case REG: | |
1562 | case CC0: | |
1563 | case PC: | |
1564 | case CONST_INT: | |
1565 | case CONST: | |
1566 | case SYMBOL_REF: | |
1567 | case LABEL_REF: | |
1568 | case CONST_DOUBLE: | |
1569 | return; | |
1570 | ||
1571 | case SIGN_EXTRACT: | |
1572 | case ZERO_EXTRACT: | |
1573 | /* Note that in some cases those types of expressions are altered | |
1574 | by optimize_bit_field, and do not survive to get here. */ | |
1575 | if (XEXP (x, 0) == var | |
1576 | || (GET_CODE (XEXP (x, 0)) == SUBREG | |
1577 | && SUBREG_REG (XEXP (x, 0)) == var)) | |
1578 | { | |
1579 | /* Get TEM as a valid MEM in the mode presently in the insn. | |
1580 | ||
1581 | We don't worry about the possibility of MATCH_DUP here; it | |
1582 | is highly unlikely and would be tricky to handle. */ | |
1583 | ||
1584 | tem = XEXP (x, 0); | |
1585 | if (GET_CODE (tem) == SUBREG) | |
1586 | tem = fixup_memory_subreg (tem, insn, 1); | |
1587 | tem = fixup_stack_1 (tem, insn); | |
1588 | ||
1589 | /* Unless we want to load from memory, get TEM into the proper mode | |
1590 | for an extract from memory. This can only be done if the | |
1591 | extract is at a constant position and length. */ | |
1592 | ||
1593 | if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT | |
1594 | && GET_CODE (XEXP (x, 2)) == CONST_INT | |
1595 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
1596 | && ! MEM_VOLATILE_P (tem)) | |
1597 | { | |
1598 | enum machine_mode wanted_mode = VOIDmode; | |
1599 | enum machine_mode is_mode = GET_MODE (tem); | |
1600 | int width = INTVAL (XEXP (x, 1)); | |
1601 | int pos = INTVAL (XEXP (x, 2)); | |
1602 | ||
1603 | #ifdef HAVE_extzv | |
1604 | if (GET_CODE (x) == ZERO_EXTRACT) | |
1605 | wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1]; | |
1606 | #endif | |
1607 | #ifdef HAVE_extv | |
1608 | if (GET_CODE (x) == SIGN_EXTRACT) | |
1609 | wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1]; | |
1610 | #endif | |
6dc42e49 | 1611 | /* If we have a narrower mode, we can do something. */ |
6f086dfc RS |
1612 | if (wanted_mode != VOIDmode |
1613 | && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
1614 | { | |
1615 | int offset = pos / BITS_PER_UNIT; | |
1616 | rtx old_pos = XEXP (x, 2); | |
1617 | rtx newmem; | |
1618 | ||
1619 | /* If the bytes and bits are counted differently, we | |
1620 | must adjust the offset. */ | |
1621 | #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN | |
1622 | offset = (GET_MODE_SIZE (is_mode) | |
1623 | - GET_MODE_SIZE (wanted_mode) - offset); | |
1624 | #endif | |
1625 | ||
1626 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
1627 | ||
1628 | newmem = gen_rtx (MEM, wanted_mode, | |
1629 | plus_constant (XEXP (tem, 0), offset)); | |
1630 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); | |
1631 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); | |
1632 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); | |
1633 | ||
1634 | /* Make the change and see if the insn remains valid. */ | |
1635 | INSN_CODE (insn) = -1; | |
1636 | XEXP (x, 0) = newmem; | |
5f4f0e22 | 1637 | XEXP (x, 2) = GEN_INT (pos); |
6f086dfc RS |
1638 | |
1639 | if (recog_memoized (insn) >= 0) | |
1640 | return; | |
1641 | ||
1642 | /* Otherwise, restore old position. XEXP (x, 0) will be | |
1643 | restored later. */ | |
1644 | XEXP (x, 2) = old_pos; | |
1645 | } | |
1646 | } | |
1647 | ||
1648 | /* If we get here, the bitfield extract insn can't accept a memory | |
1649 | reference. Copy the input into a register. */ | |
1650 | ||
1651 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
1652 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
1653 | XEXP (x, 0) = tem1; | |
1654 | return; | |
1655 | } | |
1656 | break; | |
1657 | ||
1658 | case SUBREG: | |
1659 | if (SUBREG_REG (x) == var) | |
1660 | { | |
00d8a4c1 RK |
1661 | /* If this is a special SUBREG made because VAR was promoted |
1662 | from a wider mode, replace it with VAR and call ourselves | |
1663 | recursively, this time saying that the object previously | |
1664 | had its current mode (by virtue of the SUBREG). */ | |
1665 | ||
1666 | if (SUBREG_PROMOTED_VAR_P (x)) | |
1667 | { | |
1668 | *loc = var; | |
1669 | fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements); | |
1670 | return; | |
1671 | } | |
1672 | ||
6f086dfc RS |
1673 | /* If this SUBREG makes VAR wider, it has become a paradoxical |
1674 | SUBREG with VAR in memory, but these aren't allowed at this | |
1675 | stage of the compilation. So load VAR into a pseudo and take | |
1676 | a SUBREG of that pseudo. */ | |
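	  /* E.g. (subreg:DI (mem:SI ...VAR...) 0) would be paradoxical
	     here, so we rewrite it as (subreg:DI (reg:SI 110) 0) -- the
	     register number is illustrative -- and let our caller copy
	     VAR into the new pseudo before this insn.  */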
1677 | if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var))) | |
1678 | { | |
2740a678 | 1679 | replacement = find_fixup_replacement (replacements, var); |
6f086dfc RS |
1680 | if (replacement->new == 0) |
1681 | replacement->new = gen_reg_rtx (GET_MODE (var)); | |
1682 | SUBREG_REG (x) = replacement->new; | |
1683 | return; | |
1684 | } | |
1685 | ||
1686 | /* See if we have already found a replacement for this SUBREG. | |
1687 | If so, use it. Otherwise, make a MEM and see if the insn | |
1688 | is recognized. If not, or if we should force MEM into a register, | |
1689 | make a pseudo for this SUBREG. */ | |
2740a678 | 1690 | replacement = find_fixup_replacement (replacements, x); |
6f086dfc RS |
1691 | if (replacement->new) |
1692 | { | |
1693 | *loc = replacement->new; | |
1694 | return; | |
1695 | } | |
1696 | ||
1697 | replacement->new = *loc = fixup_memory_subreg (x, insn, 0); | |
1698 | ||
f898f031 | 1699 | INSN_CODE (insn) = -1; |
6f086dfc RS |
1700 | if (! flag_force_mem && recog_memoized (insn) >= 0) |
1701 | return; | |
1702 | ||
1703 | *loc = replacement->new = gen_reg_rtx (GET_MODE (x)); | |
1704 | return; | |
1705 | } | |
1706 | break; | |
1707 | ||
1708 | case SET: | |
1709 | /* First do special simplification of bit-field references. */ | |
1710 | if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT | |
1711 | || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT) | |
1712 | optimize_bit_field (x, insn, 0); | |
1713 | if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT | |
1714 | || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT) | |
5f4f0e22 | 1715 | optimize_bit_field (x, insn, NULL_PTR); |
6f086dfc RS |
1716 | |
1717 | /* If SET_DEST is now a paradoxical SUBREG, put the result of this | |
1718 | insn into a pseudo and store the low part of the pseudo into VAR. */ | |
1719 | if (GET_CODE (SET_DEST (x)) == SUBREG | |
1720 | && SUBREG_REG (SET_DEST (x)) == var | |
1721 | && (GET_MODE_SIZE (GET_MODE (SET_DEST (x))) | |
1722 | > GET_MODE_SIZE (GET_MODE (var)))) | |
1723 | { | |
1724 | SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x))); | |
1725 | emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var), | |
1726 | tem)), | |
1727 | insn); | |
1728 | break; | |
1729 | } | |
1730 | ||
1731 | { | |
1732 | rtx dest = SET_DEST (x); | |
1733 | rtx src = SET_SRC (x); | |
1734 | rtx outerdest = dest; | |
1735 | ||
1736 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART | |
1737 | || GET_CODE (dest) == SIGN_EXTRACT | |
1738 | || GET_CODE (dest) == ZERO_EXTRACT) | |
1739 | dest = XEXP (dest, 0); | |
1740 | ||
1741 | if (GET_CODE (src) == SUBREG) | |
1742 | src = XEXP (src, 0); | |
1743 | ||
1744 | /* If VAR does not appear at the top level of the SET | |
1745 | just scan the lower levels of the tree. */ | |
1746 | ||
1747 | if (src != var && dest != var) | |
1748 | break; | |
1749 | ||
1750 | /* We will need to rerecognize this insn. */ | |
1751 | INSN_CODE (insn) = -1; | |
1752 | ||
1753 | #ifdef HAVE_insv | |
1754 | if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var) | |
1755 | { | |
1756 | /* Since this case will return, ensure we fixup all the | |
1757 | operands here. */ | |
00d8a4c1 RK |
1758 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1), |
1759 | insn, replacements); | |
1760 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2), | |
1761 | insn, replacements); | |
1762 | fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x), | |
1763 | insn, replacements); | |
6f086dfc RS |
1764 | |
1765 | tem = XEXP (outerdest, 0); | |
1766 | ||
1767 | /* Clean up (SUBREG:SI (MEM:mode ...) 0) | |
1768 | that may appear inside a ZERO_EXTRACT. | |
1769 | This was legitimate when the MEM was a REG. */ | |
1770 | if (GET_CODE (tem) == SUBREG | |
1771 | && SUBREG_REG (tem) == var) | |
1772 | tem = fixup_memory_subreg (tem, insn, 1); | |
1773 | else | |
1774 | tem = fixup_stack_1 (tem, insn); | |
1775 | ||
1776 | if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT | |
1777 | && GET_CODE (XEXP (outerdest, 2)) == CONST_INT | |
1778 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
1779 | && ! MEM_VOLATILE_P (tem)) | |
1780 | { | |
1781 | enum machine_mode wanted_mode | |
1782 | = insn_operand_mode[(int) CODE_FOR_insv][0]; | |
1783 | enum machine_mode is_mode = GET_MODE (tem); | |
1784 | int width = INTVAL (XEXP (outerdest, 1)); | |
1785 | int pos = INTVAL (XEXP (outerdest, 2)); | |
1786 | ||
6dc42e49 | 1787 | /* If we have a narrower mode, we can do something. */ |
6f086dfc RS |
1788 | if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) |
1789 | { | |
1790 | int offset = pos / BITS_PER_UNIT; | |
1791 | rtx old_pos = XEXP (outerdest, 2); | |
1792 | rtx newmem; | |
1793 | ||
1794 | #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN | |
1795 | offset = (GET_MODE_SIZE (is_mode) | |
1796 | - GET_MODE_SIZE (wanted_mode) - offset); | |
1797 | #endif | |
1798 | ||
1799 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
1800 | ||
1801 | newmem = gen_rtx (MEM, wanted_mode, | |
1802 | plus_constant (XEXP (tem, 0), offset)); | |
1803 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); | |
1804 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); | |
1805 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); | |
1806 | ||
1807 | /* Make the change and see if the insn remains valid. */ | |
1808 | INSN_CODE (insn) = -1; | |
1809 | XEXP (outerdest, 0) = newmem; | |
5f4f0e22 | 1810 | XEXP (outerdest, 2) = GEN_INT (pos); |
6f086dfc RS |
1811 | |
1812 | if (recog_memoized (insn) >= 0) | |
1813 | return; | |
1814 | ||
1815 | /* Otherwise, restore old position. XEXP (x, 0) will be | |
1816 | restored later. */ | |
1817 | XEXP (outerdest, 2) = old_pos; | |
1818 | } | |
1819 | } | |
1820 | ||
1821 | /* If we get here, the bit-field store doesn't allow memory | |
1822 | or isn't located at a constant position. Load the value into | |
1823 | a register, do the store, and put it back into memory. */ | |
1824 | ||
1825 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
1826 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
1827 | emit_insn_after (gen_move_insn (tem, tem1), insn); | |
1828 | XEXP (outerdest, 0) = tem1; | |
1829 | return; | |
1830 | } | |
1831 | #endif | |
1832 | ||
1833 | /* STRICT_LOW_PART is a no-op on memory references | |
1834 | and it can cause combinations to be unrecognizable, | |
1835 | so eliminate it. */ | |
1836 | ||
1837 | if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART) | |
1838 | SET_DEST (x) = XEXP (SET_DEST (x), 0); | |
1839 | ||
1840 | /* A valid insn to copy VAR into or out of a register | |
1841 | must be left alone, to avoid an infinite loop here. | |
1842 | If the reference to VAR is by a subreg, fix that up, | |
1843 | since SUBREG is not valid for a memref. | |
e15762df RK |
1844 | Also fix up the address of the stack slot. |
1845 | ||
1846 | Note that we must not try to recognize the insn until | |
1847 | after we know that we have valid addresses and no | |
1848 | (subreg (mem ...) ...) constructs, since these interfere | |
1849 | with determining the validity of the insn. */ | |
6f086dfc RS |
1850 | |
1851 | if ((SET_SRC (x) == var | |
1852 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
1853 | && SUBREG_REG (SET_SRC (x)) == var)) | |
1854 | && (GET_CODE (SET_DEST (x)) == REG | |
1855 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
1856 | && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)) | |
e15762df | 1857 | && x == single_set (PATTERN (insn))) |
6f086dfc | 1858 | { |
e15762df RK |
1859 | rtx pat; |
1860 | ||
2740a678 | 1861 | replacement = find_fixup_replacement (replacements, SET_SRC (x)); |
6f086dfc | 1862 | if (replacement->new) |
6f086dfc | 1863 | SET_SRC (x) = replacement->new; |
6f086dfc RS |
1864 | else if (GET_CODE (SET_SRC (x)) == SUBREG) |
1865 | SET_SRC (x) = replacement->new | |
1866 | = fixup_memory_subreg (SET_SRC (x), insn, 0); | |
1867 | else | |
1868 | SET_SRC (x) = replacement->new | |
1869 | = fixup_stack_1 (SET_SRC (x), insn); | |
e15762df RK |
1870 | |
1871 | if (recog_memoized (insn) >= 0) | |
1872 | return; | |
1873 | ||
1874 | /* INSN is not valid, but we know that we want to | |
1875 | copy SET_SRC (x) to SET_DEST (x) in some way. So | |
1876 | we generate the move and see whether it requires more | |
1877 | than one insn. If it does, we emit those insns and | |
1878 | delete INSN. Otherwise, we can just replace the pattern | |
1879 | of INSN; we have already verified above that INSN has | |
1880 | no function other than to do X. */ | |
1881 | ||
1882 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
1883 | if (GET_CODE (pat) == SEQUENCE) | |
1884 | { | |
1885 | emit_insn_after (pat, insn); | |
1886 | PUT_CODE (insn, NOTE); | |
1887 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1888 | NOTE_SOURCE_FILE (insn) = 0; | |
1889 | } | |
1890 | else | |
1891 | PATTERN (insn) = pat; | |
1892 | ||
6f086dfc RS |
1893 | return; |
1894 | } | |
1895 | ||
1896 | if ((SET_DEST (x) == var | |
1897 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
1898 | && SUBREG_REG (SET_DEST (x)) == var)) | |
1899 | && (GET_CODE (SET_SRC (x)) == REG | |
1900 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
1901 | && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG)) | |
e15762df | 1902 | && x == single_set (PATTERN (insn))) |
6f086dfc | 1903 | { |
e15762df RK |
1904 | rtx pat; |
1905 | ||
6f086dfc RS |
1906 | if (GET_CODE (SET_DEST (x)) == SUBREG) |
1907 | SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0); | |
1908 | else | |
1909 | SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn); | |
e15762df RK |
1910 | |
1911 | if (recog_memoized (insn) >= 0) | |
1912 | return; | |
1913 | ||
1914 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
1915 | if (GET_CODE (pat) == SEQUENCE) | |
1916 | { | |
1917 | emit_insn_after (pat, insn); | |
1918 | PUT_CODE (insn, NOTE); | |
1919 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1920 | NOTE_SOURCE_FILE (insn) = 0; | |
1921 | } | |
1922 | else | |
1923 | PATTERN (insn) = pat; | |
1924 | ||
6f086dfc RS |
1925 | return; |
1926 | } | |
1927 | ||
1928 | /* Otherwise, storing into VAR must be handled specially | |
1929 | by storing into a temporary and copying that into VAR | |
00d8a4c1 RK |
1930 | with a new insn after this one. Note that this case |
1931 | will be used when storing into a promoted scalar since | |
1932 | the insn will now have different modes on the input | |
1933 | and output and hence will be invalid (except for the case | |
1934 | of setting it to a constant, which does not need any | |
1935 | change if it is valid). We generate extra code in that case, | |
1936 | but combine.c will eliminate it. */ | |
6f086dfc RS |
1937 | |
1938 | if (dest == var) | |
1939 | { | |
1940 | rtx temp; | |
00d8a4c1 RK |
1941 | rtx fixeddest = SET_DEST (x); |
1942 | ||
6f086dfc | 1943 | /* STRICT_LOW_PART can be discarded around a MEM. */ | |
00d8a4c1 RK |
1944 | if (GET_CODE (fixeddest) == STRICT_LOW_PART) |
1945 | fixeddest = XEXP (fixeddest, 0); | |
6f086dfc | 1946 | /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */ |
00d8a4c1 RK |
1947 | if (GET_CODE (fixeddest) == SUBREG) |
1948 | fixeddest = fixup_memory_subreg (fixeddest, insn, 0); | |
6f086dfc | 1949 | else |
00d8a4c1 RK |
1950 | fixeddest = fixup_stack_1 (fixeddest, insn); |
1951 | ||
1952 | temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode | |
1953 | ? GET_MODE (fixeddest) | |
1954 | : GET_MODE (SET_SRC (x))); | |
1955 | ||
1956 | emit_insn_after (gen_move_insn (fixeddest, | |
1957 | gen_lowpart (GET_MODE (fixeddest), | |
1958 | temp)), | |
1959 | insn); | |
6f086dfc | 1960 | |
6f086dfc RS |
1961 | SET_DEST (x) = temp; |
1962 | } | |
1963 | } | |
1964 | } | |
1965 | ||
1966 | /* Nothing special about this RTX; fix its operands. */ | |
1967 | ||
1968 | fmt = GET_RTX_FORMAT (code); | |
1969 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1970 | { | |
1971 | if (fmt[i] == 'e') | |
00d8a4c1 | 1972 | fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements); |
6f086dfc RS |
1973 | if (fmt[i] == 'E') |
1974 | { | |
1975 | register int j; | |
1976 | for (j = 0; j < XVECLEN (x, i); j++) | |
00d8a4c1 RK |
1977 | fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j), |
1978 | insn, replacements); | |
6f086dfc RS |
1979 | } |
1980 | } | |
1981 | } | |
1982 | \f | |
1983 | /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)), | |
1984 | return an rtx (MEM:m1 newaddr) which is equivalent. | |
1985 | If any insns must be emitted to compute NEWADDR, put them before INSN. | |
1986 | ||
1987 | UNCRITICAL nonzero means accept paradoxical subregs. | |
ab6155b7 | 1988 | This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */ |
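/* For example, with 4-byte words (register number illustrative),
   (subreg:SI (mem:DI (reg:SI 100)) 1) becomes
   (mem:SI (plus:SI (reg:SI 100) (const_int 4)))
   since word 1 of the DImode value starts 4 bytes past its start.  */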
6f086dfc RS |
1989 | |
1990 | static rtx | |
1991 | fixup_memory_subreg (x, insn, uncritical) | |
1992 | rtx x; | |
1993 | rtx insn; | |
1994 | int uncritical; | |
1995 | { | |
1996 | int offset = SUBREG_WORD (x) * UNITS_PER_WORD; | |
1997 | rtx addr = XEXP (SUBREG_REG (x), 0); | |
1998 | enum machine_mode mode = GET_MODE (x); | |
1999 | rtx saved, result; | |
2000 | ||
2001 | /* Paradoxical SUBREGs are usually invalid during RTL generation. */ | |
2002 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) | |
2003 | && ! uncritical) | |
2004 | abort (); | |
2005 | ||
2006 | #if BYTES_BIG_ENDIAN | |
2007 | offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
2008 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))); | |
2009 | #endif | |
2010 | addr = plus_constant (addr, offset); | |
2011 | if (!flag_force_addr && memory_address_p (mode, addr)) | |
2012 | /* Shortcut if no insns need be emitted. */ | |
2013 | return change_address (SUBREG_REG (x), mode, addr); | |
2014 | start_sequence (); | |
2015 | result = change_address (SUBREG_REG (x), mode, addr); | |
2016 | emit_insn_before (gen_sequence (), insn); | |
2017 | end_sequence (); | |
2018 | return result; | |
2019 | } | |
2020 | ||
2021 | /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X. | |
2022 | Replace subexpressions of X in place. | |
2023 | If X itself is a (SUBREG (MEM ...) ...), return the replacement expression. | |
2024 | Otherwise return X, with its contents possibly altered. | |
2025 | ||
ab6155b7 RK |
2026 | If any insns must be emitted to compute NEWADDR, put them before INSN. |
2027 | ||
2028 | UNCRITICAL is as in fixup_memory_subreg. */ | |
6f086dfc RS |
2029 | |
2030 | static rtx | |
ab6155b7 | 2031 | walk_fixup_memory_subreg (x, insn, uncritical) |
6f086dfc RS |
2032 | register rtx x; |
2033 | rtx insn; | |
ab6155b7 | 2034 | int uncritical; |
6f086dfc RS |
2035 | { |
2036 | register enum rtx_code code; | |
2037 | register char *fmt; | |
2038 | register int i; | |
2039 | ||
2040 | if (x == 0) | |
2041 | return 0; | |
2042 | ||
2043 | code = GET_CODE (x); | |
2044 | ||
2045 | if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
ab6155b7 | 2046 | return fixup_memory_subreg (x, insn, uncritical); |
6f086dfc RS |
2047 | |
2048 | /* Nothing special about this RTX; fix its operands. */ | |
2049 | ||
2050 | fmt = GET_RTX_FORMAT (code); | |
2051 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2052 | { | |
2053 | if (fmt[i] == 'e') | |
ab6155b7 | 2054 | XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical); |
6f086dfc RS |
2055 | if (fmt[i] == 'E') |
2056 | { | |
2057 | register int j; | |
2058 | for (j = 0; j < XVECLEN (x, i); j++) | |
2059 | XVECEXP (x, i, j) | |
ab6155b7 | 2060 | = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical); |
6f086dfc RS |
2061 | } |
2062 | } | |
2063 | return x; | |
2064 | } | |
2065 | \f | |
2066 | #if 0 | |
2067 | /* Fix up any references to stack slots that are invalid memory addresses | |
2068 | because they exceed the maximum range of a displacement. */ | |
2069 | ||
2070 | void | |
2071 | fixup_stack_slots () | |
2072 | { | |
2073 | register rtx insn; | |
2074 | ||
2075 | /* Did we generate a stack slot that is out of range | |
2076 | or otherwise has an invalid address? */ | |
2077 | if (invalid_stack_slot) | |
2078 | { | |
2079 | /* Yes. Must scan all insns for stack-refs that exceed the limit. */ | |
2080 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2081 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN | |
2082 | || GET_CODE (insn) == JUMP_INSN) | |
2083 | fixup_stack_1 (PATTERN (insn), insn); | |
2084 | } | |
2085 | } | |
2086 | #endif | |
2087 | ||
2088 | /* For each memory ref within X, if it refers to a stack slot | |
2089 | with an out of range displacement, put the address in a temp register | |
2090 | (emitting new insns before INSN to load these registers) | |
2091 | and alter the memory ref to use that register. | |
2092 | Replace each such MEM rtx with a copy, to avoid clobberage. */ | |
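/* For instance, on a target that only accepts small displacements
   (constants and register numbers illustrative),
   (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 40000)))
   is handled by loading the whole address into a pseudo, emitting
   (set (reg:SI 108) (plus:SI ...)) before INSN, and then using
   (mem:SI (reg:SI 108)) in its place.  */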
2093 | ||
2094 | static rtx | |
2095 | fixup_stack_1 (x, insn) | |
2096 | rtx x; | |
2097 | rtx insn; | |
2098 | { | |
2099 | register int i; | |
2100 | register RTX_CODE code = GET_CODE (x); | |
2101 | register char *fmt; | |
2102 | ||
2103 | if (code == MEM) | |
2104 | { | |
2105 | register rtx ad = XEXP (x, 0); | |
2106 | /* If we have address of a stack slot but it's not valid | |
2107 | (displacement is too large), compute the sum in a register. */ | |
2108 | if (GET_CODE (ad) == PLUS | |
2109 | && GET_CODE (XEXP (ad, 0)) == REG | |
40d05551 RK |
2110 | && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER |
2111 | && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER) | |
2112 | || XEXP (ad, 0) == current_function_internal_arg_pointer) | |
6f086dfc RS |
2113 | && GET_CODE (XEXP (ad, 1)) == CONST_INT) |
2114 | { | |
2115 | rtx temp, seq; | |
2116 | if (memory_address_p (GET_MODE (x), ad)) | |
2117 | return x; | |
2118 | ||
2119 | start_sequence (); | |
2120 | temp = copy_to_reg (ad); | |
2121 | seq = gen_sequence (); | |
2122 | end_sequence (); | |
2123 | emit_insn_before (seq, insn); | |
2124 | return change_address (x, VOIDmode, temp); | |
2125 | } | |
2126 | return x; | |
2127 | } | |
2128 | ||
2129 | fmt = GET_RTX_FORMAT (code); | |
2130 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2131 | { | |
2132 | if (fmt[i] == 'e') | |
2133 | XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn); | |
2134 | if (fmt[i] == 'E') | |
2135 | { | |
2136 | register int j; | |
2137 | for (j = 0; j < XVECLEN (x, i); j++) | |
2138 | XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn); | |
2139 | } | |
2140 | } | |
2141 | return x; | |
2142 | } | |
2143 | \f | |
2144 | /* Optimization: a bit-field instruction whose field | |
2145 | happens to be a byte or halfword in memory | |
2146 | can be changed to a move instruction. | |
2147 | ||
2148 | We call here when INSN is an insn to examine or store into a bit-field. | |
2149 | BODY is the SET-rtx to be altered. | |
2150 | ||
2151 | EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0. | |
2152 | (Currently this is called only from function.c, and EQUIV_MEM | |
2153 | is always 0.) */ | |
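/* A sketch, assuming a little-endian target (register numbers
   illustrative; positions and widths are in bits):
     (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
          (reg:SI 101))
   stores one aligned byte, so it can become the plain move
     (set (mem:QI (plus addr (const_int 1))) (subreg:QI (reg:SI 101) 0))
   with the source narrowed via gen_lowpart.  */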
2154 | ||
2155 | static void | |
2156 | optimize_bit_field (body, insn, equiv_mem) | |
2157 | rtx body; | |
2158 | rtx insn; | |
2159 | rtx *equiv_mem; | |
2160 | { | |
2161 | register rtx bitfield; | |
2162 | int destflag; | |
2163 | rtx seq = 0; | |
2164 | enum machine_mode mode; | |
2165 | ||
2166 | if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT | |
2167 | || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT) | |
2168 | bitfield = SET_DEST (body), destflag = 1; | |
2169 | else | |
2170 | bitfield = SET_SRC (body), destflag = 0; | |
2171 | ||
2172 | /* First check that the field being stored has constant size and position | |
2173 | and is in fact a byte or halfword suitably aligned. */ | |
2174 | ||
2175 | if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT | |
2176 | && GET_CODE (XEXP (bitfield, 2)) == CONST_INT | |
2177 | && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1)) | |
2178 | != BLKmode) | |
2179 | && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0) | |
2180 | { | |
2181 | register rtx memref = 0; | |
2182 | ||
2183 | /* Now check that the containing word is memory, not a register, | |
2184 | and that it is safe to change the machine mode. */ | |
2185 | ||
2186 | if (GET_CODE (XEXP (bitfield, 0)) == MEM) | |
2187 | memref = XEXP (bitfield, 0); | |
2188 | else if (GET_CODE (XEXP (bitfield, 0)) == REG | |
2189 | && equiv_mem != 0) | |
2190 | memref = equiv_mem[REGNO (XEXP (bitfield, 0))]; | |
2191 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
2192 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM) | |
2193 | memref = SUBREG_REG (XEXP (bitfield, 0)); | |
2194 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
2195 | && equiv_mem != 0 | |
2196 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG) | |
2197 | memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))]; | |
2198 | ||
2199 | if (memref | |
2200 | && ! mode_dependent_address_p (XEXP (memref, 0)) | |
2201 | && ! MEM_VOLATILE_P (memref)) | |
2202 | { | |
2203 | /* Now adjust the address, first for any subreg'ing | |
2204 | that we are now getting rid of, | |
2205 | and then for which byte of the word is wanted. */ | |
2206 | ||
2207 | register int offset = INTVAL (XEXP (bitfield, 2)); | |
2208 | /* Adjust OFFSET to count bits from low-address byte. */ | |
2209 | #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN | |
2210 | offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0))) | |
2211 | - offset - INTVAL (XEXP (bitfield, 1))); | |
2212 | #endif | |
2213 | /* Adjust OFFSET to count bytes from low-address byte. */ | |
2214 | offset /= BITS_PER_UNIT; | |
2215 | if (GET_CODE (XEXP (bitfield, 0)) == SUBREG) | |
2216 | { | |
2217 | offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD; | |
2218 | #if BYTES_BIG_ENDIAN | |
2219 | offset -= (MIN (UNITS_PER_WORD, | |
2220 | GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0)))) | |
2221 | - MIN (UNITS_PER_WORD, | |
2222 | GET_MODE_SIZE (GET_MODE (memref)))); | |
2223 | #endif | |
2224 | } | |
2225 | ||
2226 | memref = change_address (memref, mode, | |
2227 | plus_constant (XEXP (memref, 0), offset)); | |
2228 | ||
2229 | /* Store this memory reference where | |
2230 | we found the bit field reference. */ | |
2231 | ||
2232 | if (destflag) | |
2233 | { | |
2234 | validate_change (insn, &SET_DEST (body), memref, 1); | |
2235 | if (! CONSTANT_ADDRESS_P (SET_SRC (body))) | |
2236 | { | |
2237 | rtx src = SET_SRC (body); | |
2238 | while (GET_CODE (src) == SUBREG | |
2239 | && SUBREG_WORD (src) == 0) | |
2240 | src = SUBREG_REG (src); | |
2241 | if (GET_MODE (src) != GET_MODE (memref)) | |
2242 | src = gen_lowpart (GET_MODE (memref), SET_SRC (body)); | |
2243 | validate_change (insn, &SET_SRC (body), src, 1); | |
2244 | } | |
2245 | else if (GET_MODE (SET_SRC (body)) != VOIDmode | |
2246 | && GET_MODE (SET_SRC (body)) != GET_MODE (memref)) | |
2247 | /* This shouldn't happen because anything that didn't have | |
2248 | one of these modes should have been converted explicitly | |
2249 | and then referenced through a subreg. | |
2250 | This is so because the original bit-field was | |
2251 | handled by agg_mode and so its tree structure had | |
2252 | the same mode that memref now has. */ | |
2253 | abort (); | |
2254 | } | |
2255 | else | |
2256 | { | |
2257 | rtx dest = SET_DEST (body); | |
2258 | ||
2259 | while (GET_CODE (dest) == SUBREG | |
2260 | && SUBREG_WORD (dest) == 0) | |
2261 | dest = SUBREG_REG (dest); | |
2262 | ||
2263 | validate_change (insn, &SET_DEST (body), dest, 1); | |
2264 | ||
2265 | if (GET_MODE (dest) == GET_MODE (memref)) | |
2266 | validate_change (insn, &SET_SRC (body), memref, 1); | |
2267 | else | |
2268 | { | |
2269 | /* Convert the mem ref to the destination mode. */ | |
2270 | rtx newreg = gen_reg_rtx (GET_MODE (dest)); | |
2271 | ||
2272 | start_sequence (); | |
2273 | convert_move (newreg, memref, | |
2274 | GET_CODE (SET_SRC (body)) == ZERO_EXTRACT); | |
2275 | seq = get_insns (); | |
2276 | end_sequence (); | |
2277 | ||
2278 | validate_change (insn, &SET_SRC (body), newreg, 1); | |
2279 | } | |
2280 | } | |
2281 | ||
2282 | /* See if we can convert this extraction or insertion into | |
2283 | a simple move insn. We might not be able to do so if this | |
2284 | was, for example, part of a PARALLEL. | |
2285 | ||
2286 | If we succeed, write out any needed conversions. If we fail, | |
2287 | it is hard to guess why we failed, so don't do anything | |
2288 | special; just let the optimization be suppressed. */ | |
2289 | ||
2290 | if (apply_change_group () && seq) | |
2291 | emit_insns_before (seq, insn); | |
2292 | } | |
2293 | } | |
2294 | } | |
2295 | \f | |
2296 | /* These routines are responsible for converting virtual register references | |
2297 | to the actual hard register references once RTL generation is complete. | |
2298 | ||
2299 | The following four variables are used for communication between the | |
2300 | routines. They contain the offsets of the virtual registers from their | |
2301 | respective hard registers. */ | |
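/* E.g. a frame variable reference
     (plus:SI (reg virtual-stack-vars) (const_int 8))
   is eventually rewritten by instantiate_virtual_regs_1 below as
     (plus:SI (reg frame-pointer) (const_int var_offset + 8)).  */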
2302 | ||
2303 | static int in_arg_offset; | |
2304 | static int var_offset; | |
2305 | static int dynamic_offset; | |
2306 | static int out_arg_offset; | |
2307 | ||
2308 | /* In most machines, the stack pointer register is equivalent to the bottom | |
2309 | of the stack. */ | |
2310 | ||
2311 | #ifndef STACK_POINTER_OFFSET | |
2312 | #define STACK_POINTER_OFFSET 0 | |
2313 | #endif | |
2314 | ||
2315 | /* If not defined, pick an appropriate default for the offset of dynamically | |
2316 | allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, | |
2317 | REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ | |
2318 | ||
2319 | #ifndef STACK_DYNAMIC_OFFSET | |
2320 | ||
2321 | #ifdef ACCUMULATE_OUTGOING_ARGS | |
2322 | /* The bottom of the stack points to the actual arguments. If | |
2323 | REG_PARM_STACK_SPACE is defined, this includes the space for the register | |
2324 | parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, | |
2325 | stack space for register parameters is not pushed by the caller, but | |
2326 | rather part of the fixed stack areas and hence not included in | |
2327 | `current_function_outgoing_args_size'. Nevertheless, we must allow | |
2328 | for it when allocating dynamic stack objects. */ | |
2329 | ||
2330 | #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) | |
2331 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
2332 | (current_function_outgoing_args_size \ | |
2333 | + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET)) | |
2334 | ||
2335 | #else | |
2336 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
2337 | (current_function_outgoing_args_size + (STACK_POINTER_OFFSET)) | |
2338 | #endif | |
2339 | ||
2340 | #else | |
2341 | #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET | |
2342 | #endif | |
2343 | #endif | |
2344 | ||
2345 | /* Pass through the INSNS of function FNDECL and convert virtual register | |
2346 | references to hard register references. */ | |
2347 | ||
2348 | void | |
2349 | instantiate_virtual_regs (fndecl, insns) | |
2350 | tree fndecl; | |
2351 | rtx insns; | |
2352 | { | |
2353 | rtx insn; | |
2354 | ||
2355 | /* Compute the offsets to use for this function. */ | |
2356 | in_arg_offset = FIRST_PARM_OFFSET (fndecl); | |
2357 | var_offset = STARTING_FRAME_OFFSET; | |
2358 | dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl); | |
2359 | out_arg_offset = STACK_POINTER_OFFSET; | |
2360 | ||
2361 | /* Scan all variables and parameters of this function. For each that is | |
2362 | in memory, instantiate all virtual registers if the result is a valid | |
2363 | address. If not, we do it later. That will handle most uses of virtual | |
2364 | regs on many machines. */ | |
2365 | instantiate_decls (fndecl, 1); | |
2366 | ||
2367 | /* Initialize recognition, indicating that volatile is OK. */ | |
2368 | init_recog (); | |
2369 | ||
2370 | /* Scan through all the insns, instantiating every virtual register still | |
2371 | present. */ | |
2372 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
2373 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN | |
2374 | || GET_CODE (insn) == CALL_INSN) | |
2375 | { | |
2376 | instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1); | |
5f4f0e22 | 2377 | instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0); |
6f086dfc RS |
2378 | } |
2379 | ||
2380 | /* Now instantiate the remaining register equivalences for debugging info. | |
2381 | These will not be valid addresses. */ | |
2382 | instantiate_decls (fndecl, 0); | |
2383 | ||
2384 | /* Indicate that, from now on, assign_stack_local should use | |
2385 | frame_pointer_rtx. */ | |
2386 | virtuals_instantiated = 1; | |
2387 | } | |
2388 | ||
2389 | /* Scan all decls in FNDECL (both variables and parameters) and instantiate | |
2390 | all virtual registers in their DECL_RTL's. | |
2391 | ||
2392 | If VALID_ONLY, do this only if the resulting address is still valid. | |
2393 | Otherwise, always do it. */ | |
2394 | ||
2395 | static void | |
2396 | instantiate_decls (fndecl, valid_only) | |
2397 | tree fndecl; | |
2398 | int valid_only; | |
2399 | { | |
2400 | tree decl; | |
2401 | ||
a82ad570 | 2402 | if (DECL_INLINE (fndecl)) |
6f086dfc RS |
2403 | /* When compiling an inline function, the obstack used for |
2404 | rtl allocation is the maybepermanent_obstack. Calling | |
2405 | `resume_temporary_allocation' switches us back to that | |
2406 | obstack while we process this function's parameters. */ | |
2407 | resume_temporary_allocation (); | |
2408 | ||
2409 | /* Process all parameters of the function. */ | |
2410 | for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) | |
2411 | { | |
5a73491b RK |
2412 | instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)), |
2413 | valid_only); | |
2414 | instantiate_decl (DECL_INCOMING_RTL (decl), | |
2415 | int_size_in_bytes (TREE_TYPE (decl)), valid_only); | |
6f086dfc RS |
2416 | } |
2417 | ||
2418 | /* Now process all variables defined in the function or its subblocks. */ | |
2419 | instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only); | |
2420 | ||
a82ad570 | 2421 | if (DECL_INLINE (fndecl)) |
6f086dfc RS |
2422 | { |
2423 | /* Save all rtl allocated for this function by raising the | |
2424 | high-water mark on the maybepermanent_obstack. */ | |
2425 | preserve_data (); | |
2426 | /* All further rtl allocation is now done in the current_obstack. */ | |
2427 | rtl_in_current_obstack (); | |
2428 | } | |
2429 | } | |
2430 | ||
2431 | /* Subroutine of instantiate_decls: Process all decls in the given | |
2432 | BLOCK node and all its subblocks. */ | |
2433 | ||
2434 | static void | |
2435 | instantiate_decls_1 (let, valid_only) | |
2436 | tree let; | |
2437 | int valid_only; | |
2438 | { | |
2439 | tree t; | |
2440 | ||
2441 | for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) | |
5a73491b RK |
2442 | instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)), |
2443 | valid_only); | |
6f086dfc RS |
2444 | |
2445 | /* Process all subblocks. */ | |
2446 | for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) | |
2447 | instantiate_decls_1 (t, valid_only); | |
2448 | } | |
5a73491b | 2449 | |
8008b228 | 2450 | /* Subroutine of the preceding procedures: Given RTL representing a |
5a73491b RK |
2451 | decl and the size of the object, do any instantiation required. |
2452 | ||
2453 | If VALID_ONLY is non-zero, it means that the RTL should only be | |
2454 | changed if the new address is valid. */ | |
2455 | ||
2456 | static void | |
2457 | instantiate_decl (x, size, valid_only) | |
2458 | rtx x; | |
2459 | int size; | |
2460 | int valid_only; | |
2461 | { | |
2462 | enum machine_mode mode; | |
2463 | rtx addr; | |
2464 | ||
2465 | /* If this is not a MEM, no need to do anything. Similarly if the | |
2466 | address is a constant or a register that is not a virtual register. */ | |
2467 | ||
2468 | if (x == 0 || GET_CODE (x) != MEM) | |
2469 | return; | |
2470 | ||
2471 | addr = XEXP (x, 0); | |
2472 | if (CONSTANT_P (addr) | |
2473 | || (GET_CODE (addr) == REG | |
2474 | && (REGNO (addr) < FIRST_VIRTUAL_REGISTER | |
2475 | || REGNO (addr) > LAST_VIRTUAL_REGISTER))) | |
2476 | return; | |
2477 | ||
2478 | /* If we should only do this if the address is valid, copy the address. | |
2479 | We need to do this so we can undo any changes that might make the | |
2480 | address invalid. This copy is unfortunate, but probably can't be | |
2481 | avoided. */ | |
2482 | ||
2483 | if (valid_only) | |
2484 | addr = copy_rtx (addr); | |
2485 | ||
2486 | instantiate_virtual_regs_1 (&addr, NULL_RTX, 0); | |
2487 | ||
2488 | if (! valid_only) | |
2489 | return; | |
2490 | ||
2491 | /* Now verify that the resulting address is valid for every integer or | |
2492 | floating-point mode up to and including SIZE bytes long. We do this | |
2493 | since the object might be accessed in any mode and frame addresses | |
2494 | are shared. */ | |
2495 | ||
2496 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2497 | mode != VOIDmode && GET_MODE_SIZE (mode) <= size; | |
2498 | mode = GET_MODE_WIDER_MODE (mode)) | |
2499 | if (! memory_address_p (mode, addr)) | |
2500 | return; | |
2501 | ||
2502 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); | |
2503 | mode != VOIDmode && GET_MODE_SIZE (mode) <= size; | |
2504 | mode = GET_MODE_WIDER_MODE (mode)) | |
2505 | if (! memory_address_p (mode, addr)) | |
2506 | return; | |
2507 | ||
2508 | /* Otherwise, put back the address, now that we have updated it and we | |
2509 | know it is valid. */ | |
2510 | ||
2511 | XEXP (x, 0) = addr; | |
2512 | } | |
6f086dfc RS |
2513 | \f |
2514 | /* Given a pointer to a piece of rtx and an optional pointer to the | |
2515 | containing object, instantiate any virtual registers present in it. | |
2516 | ||
2517 | If EXTRA_INSNS, we always do the replacement and generate | |
2518 | any extra insns before OBJECT. If it is zero, we do nothing if replacement | |
2519 | is not valid. | |
2520 | ||
2521 | Return 1 if we either had nothing to do or if we were able to do the | |
2522 | needed replacement. Return 0 otherwise; we only return zero if | |
2523 | EXTRA_INSNS is zero. | |
2524 | ||
2525 | We first try some simple transformations to avoid the creation of extra | |
2526 | pseudos. */ | |
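/* In the common PLUS case the offset folds into the existing constant:
   (plus:SI (reg virtual-incoming-args) (const_int 4)) simply becomes
   (plus:SI (reg arg-pointer) (const_int in_arg_offset + 4)), so no new
   pseudos or insns are usually needed.  */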
2527 | ||
2528 | static int | |
2529 | instantiate_virtual_regs_1 (loc, object, extra_insns) | |
2530 | rtx *loc; | |
2531 | rtx object; | |
2532 | int extra_insns; | |
2533 | { | |
2534 | rtx x; | |
2535 | RTX_CODE code; | |
2536 | rtx new = 0; | |
2537 | int offset; | |
2538 | rtx temp; | |
2539 | rtx seq; | |
2540 | int i, j; | |
2541 | char *fmt; | |
2542 | ||
2543 | /* Re-start here to avoid recursion in common cases. */ | |
2544 | restart: | |
2545 | ||
2546 | x = *loc; | |
2547 | if (x == 0) | |
2548 | return 1; | |
2549 | ||
2550 | code = GET_CODE (x); | |
2551 | ||
2552 | /* Check for some special cases. */ | |
2553 | switch (code) | |
2554 | { | |
2555 | case CONST_INT: | |
2556 | case CONST_DOUBLE: | |
2557 | case CONST: | |
2558 | case SYMBOL_REF: | |
2559 | case CODE_LABEL: | |
2560 | case PC: | |
2561 | case CC0: | |
2562 | case ASM_INPUT: | |
2563 | case ADDR_VEC: | |
2564 | case ADDR_DIFF_VEC: | |
2565 | case RETURN: | |
2566 | return 1; | |
2567 | ||
2568 | case SET: | |
2569 | /* We are allowed to set the virtual registers. This means that | |
2570 | that the actual register should receive the source minus the | |
2571 | appropriate offset. This is used, for example, in the handling | |
2572 | of non-local gotos. */ | |
2573 | if (SET_DEST (x) == virtual_incoming_args_rtx) | |
2574 | new = arg_pointer_rtx, offset = - in_arg_offset; | |
2575 | else if (SET_DEST (x) == virtual_stack_vars_rtx) | |
2576 | new = frame_pointer_rtx, offset = - var_offset; | |
2577 | else if (SET_DEST (x) == virtual_stack_dynamic_rtx) | |
2578 | new = stack_pointer_rtx, offset = - dynamic_offset; | |
2579 | else if (SET_DEST (x) == virtual_outgoing_args_rtx) | |
2580 | new = stack_pointer_rtx, offset = - out_arg_offset; | |
2581 | ||
2582 | if (new) | |
2583 | { | |
2584 | /* The only valid sources here are PLUS or REG. Just do | |
2585 | the simplest possible thing to handle them. */ | |
2586 | if (GET_CODE (SET_SRC (x)) != REG | |
2587 | && GET_CODE (SET_SRC (x)) != PLUS) | |
2588 | abort (); | |
2589 | ||
2590 | start_sequence (); | |
2591 | if (GET_CODE (SET_SRC (x)) != REG) | |
5f4f0e22 | 2592 | temp = force_operand (SET_SRC (x), NULL_RTX); |
6f086dfc RS |
2593 | else |
2594 | temp = SET_SRC (x); | |
5f4f0e22 | 2595 | temp = force_operand (plus_constant (temp, offset), NULL_RTX); |
6f086dfc RS |
2596 | seq = get_insns (); |
2597 | end_sequence (); | |
2598 | ||
2599 | emit_insns_before (seq, object); | |
2600 | SET_DEST (x) = new; | |
2601 | ||
2602 | if (!validate_change (object, &SET_SRC (x), temp, 0) | |
2603 | || ! extra_insns) | |
2604 | abort (); | |
2605 | ||
2606 | return 1; | |
2607 | } | |
2608 | ||
2609 | instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns); | |
2610 | loc = &SET_SRC (x); | |
2611 | goto restart; | |
2612 | ||
2613 | case PLUS: | |
2614 | /* Handle special case of virtual register plus constant. */ | |
2615 | if (CONSTANT_P (XEXP (x, 1))) | |
2616 | { | |
2617 | rtx old; | |
2618 | ||
2619 | /* Check for (plus (plus VIRT foo) (const_int)) first. */ | |
2620 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
2621 | { | |
2622 | rtx inner = XEXP (XEXP (x, 0), 0); | |
2623 | ||
2624 | if (inner == virtual_incoming_args_rtx) | |
2625 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2626 | else if (inner == virtual_stack_vars_rtx) | |
2627 | new = frame_pointer_rtx, offset = var_offset; | |
2628 | else if (inner == virtual_stack_dynamic_rtx) | |
2629 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2630 | else if (inner == virtual_outgoing_args_rtx) | |
2631 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2632 | else | |
2633 | { | |
2634 | loc = &XEXP (x, 0); | |
2635 | goto restart; | |
2636 | } | |
2637 | ||
2638 | instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object, | |
2639 | extra_insns); | |
2640 | new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1)); | |
2641 | } | |
2642 | ||
2643 | else if (XEXP (x, 0) == virtual_incoming_args_rtx) | |
2644 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2645 | else if (XEXP (x, 0) == virtual_stack_vars_rtx) | |
2646 | new = frame_pointer_rtx, offset = var_offset; | |
2647 | else if (XEXP (x, 0) == virtual_stack_dynamic_rtx) | |
2648 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2649 | else if (XEXP (x, 0) == virtual_outgoing_args_rtx) | |
2650 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2651 | else | |
2652 | { | |
2653 | /* We know the second operand is a constant. Unless the | |
2654 | first operand is a REG (which has already been checked), | |
2655 | it needs to be checked. */ | |
2656 | if (GET_CODE (XEXP (x, 0)) != REG) | |
2657 | { | |
2658 | loc = &XEXP (x, 0); | |
2659 | goto restart; | |
2660 | } | |
2661 | return 1; | |
2662 | } | |
2663 | ||
2664 | old = XEXP (x, 0); | |
2665 | XEXP (x, 0) = new; | |
2666 | new = plus_constant (XEXP (x, 1), offset); | |
2667 | ||
2668 | /* If the new constant is zero, try to replace the sum with its | |
2669 | first operand. */ | |
2670 | if (new == const0_rtx | |
2671 | && validate_change (object, loc, XEXP (x, 0), 0)) | |
2672 | return 1; | |
2673 | ||
2674 | /* Next try to replace constant with new one. */ | |
2675 | if (!validate_change (object, &XEXP (x, 1), new, 0)) | |
2676 | { | |
2677 | if (! extra_insns) | |
2678 | { | |
2679 | XEXP (x, 0) = old; | |
2680 | return 0; | |
2681 | } | |
2682 | ||
2683 | /* Otherwise copy the new constant into a register and replace | |
2684 | constant with that register. */ | |
2685 | temp = gen_reg_rtx (Pmode); | |
2686 | if (validate_change (object, &XEXP (x, 1), temp, 0)) | |
2687 | emit_insn_before (gen_move_insn (temp, new), object); | |
2688 | else | |
2689 | { | |
2690 | /* If that didn't work, replace this expression with a | |
2691 | register containing the sum. */ | |
2692 | ||
2693 | new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new); | |
2694 | XEXP (x, 0) = old; | |
2695 | ||
2696 | start_sequence (); | |
5f4f0e22 | 2697 | temp = force_operand (new, NULL_RTX); |
6f086dfc RS |
2698 | seq = get_insns (); |
2699 | end_sequence (); | |
2700 | ||
2701 | emit_insns_before (seq, object); | |
2702 | if (! validate_change (object, loc, temp, 0) | |
2703 | && ! validate_replace_rtx (x, temp, object)) | |
2704 | abort (); | |
2705 | } | |
2706 | } | |
2707 | ||
2708 | return 1; | |
2709 | } | |
2710 | ||
2711 | /* Fall through to generic two-operand expression case. */ | |
2712 | case EXPR_LIST: | |
2713 | case CALL: | |
2714 | case COMPARE: | |
2715 | case MINUS: | |
2716 | case MULT: | |
2717 | case DIV: case UDIV: | |
2718 | case MOD: case UMOD: | |
2719 | case AND: case IOR: case XOR: | |
2720 | case LSHIFT: case ASHIFT: case ROTATE: | |
2721 | case ASHIFTRT: case LSHIFTRT: case ROTATERT: | |
2722 | case NE: case EQ: | |
2723 | case GE: case GT: case GEU: case GTU: | |
2724 | case LE: case LT: case LEU: case LTU: | |
2725 | if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1))) | |
2726 | instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns); | |
2727 | loc = &XEXP (x, 0); | |
2728 | goto restart; | |
2729 | ||
2730 | case MEM: | |
2731 | /* Most cases of MEM that convert to valid addresses have already been | |
2732 | handled by our scan of regno_reg_rtx. The only special handling we | |
2733 | need here is to make a copy of the rtx to ensure it isn't being | |
b335c2cc | 2734 | shared if we have to change it to a pseudo. |
6f086dfc RS |
2735 | |
2736 | If the rtx is a simple reference to an address via a virtual register, | |
2737 | it can potentially be shared. In such cases, first try to make it | |
2738 | a valid address, which can also be shared. Otherwise, copy it and | |
2739 | proceed normally. | |
2740 | ||
2741 | First check for common cases that need no processing. These are | |
2742 | usually due to instantiation already being done on a previous instance | |
2743 | of a shared rtx. */ | |
2744 | ||
2745 | temp = XEXP (x, 0); | |
2746 | if (CONSTANT_ADDRESS_P (temp) | |
2747 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
2748 | || temp == arg_pointer_rtx | |
b37f453b DE |
2749 | #endif |
2750 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
2751 | || temp == hard_frame_pointer_rtx | |
6f086dfc RS |
2752 | #endif |
2753 | || temp == frame_pointer_rtx) | |
2754 | return 1; | |
2755 | ||
2756 | if (GET_CODE (temp) == PLUS | |
2757 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
2758 | && (XEXP (temp, 0) == frame_pointer_rtx | |
b37f453b DE |
2759 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
2760 | || XEXP (temp, 0) == hard_frame_pointer_rtx | |
2761 | #endif | |
6f086dfc RS |
2762 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
2763 | || XEXP (temp, 0) == arg_pointer_rtx | |
2764 | #endif | |
2765 | )) | |
2766 | return 1; | |
2767 | ||
2768 | if (temp == virtual_stack_vars_rtx | |
2769 | || temp == virtual_incoming_args_rtx | |
2770 | || (GET_CODE (temp) == PLUS | |
2771 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
2772 | && (XEXP (temp, 0) == virtual_stack_vars_rtx | |
2773 | || XEXP (temp, 0) == virtual_incoming_args_rtx))) | |
2774 | { | |
2775 | /* This MEM may be shared. If the substitution can be done without | |
2776 | the need to generate new pseudos, we want to do it in place | |
2777 | so all copies of the shared rtx benefit. The call below will | |
2778 | only make substitutions if the resulting address is still | |
2779 | valid. | |
2780 | ||
2781 | Note that we cannot pass X as the object in the recursive call | |
2782 | since the insn being processed may not allow all valid | |
6461be14 RS |
2783 | addresses. However, if we were not passed an object, we can |
2784 | only modify X without copying it if X will have a valid | |
2785 | address. | |
6f086dfc | 2786 | |
6461be14 RS |
2787 | ??? Also note that this can still lose if OBJECT is an insn that |
2788 | has fewer restrictions on an address than some other insn. | |
2789 | In that case, we will modify the shared address. This case | |
2790 | doesn't seem very likely, though. */ | |
2791 | ||
2792 | if (instantiate_virtual_regs_1 (&XEXP (x, 0), | |
2793 | object ? object : x, 0)) | |
6f086dfc RS |
2794 | return 1; |
2795 | ||
2796 | /* Otherwise make a copy and process that copy. We copy the entire | |
2797 | RTL expression since it might be a PLUS which could also be | |
2798 | shared. */ | |
2799 | *loc = x = copy_rtx (x); | |
2800 | } | |
2801 | ||
2802 | /* Fall through to generic unary operation case. */ | |
2803 | case USE: | |
2804 | case CLOBBER: | |
2805 | case SUBREG: | |
2806 | case STRICT_LOW_PART: | |
2807 | case NEG: case NOT: | |
2808 | case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC: | |
2809 | case SIGN_EXTEND: case ZERO_EXTEND: | |
2810 | case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: | |
2811 | case FLOAT: case FIX: | |
2812 | case UNSIGNED_FIX: case UNSIGNED_FLOAT: | |
2813 | case ABS: | |
2814 | case SQRT: | |
2815 | case FFS: | |
2816 | /* These cases either have just one operand or we know that we need not | |
2817 | check the rest of the operands. */ | |
2818 | loc = &XEXP (x, 0); | |
2819 | goto restart; | |
2820 | ||
2821 | case REG: | |
2822 | /* Try to replace with a PLUS. If that doesn't work, compute the sum | |
2823 | in front of this insn and substitute the temporary. */ | |
2824 | if (x == virtual_incoming_args_rtx) | |
2825 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2826 | else if (x == virtual_stack_vars_rtx) | |
2827 | new = frame_pointer_rtx, offset = var_offset; | |
2828 | else if (x == virtual_stack_dynamic_rtx) | |
2829 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2830 | else if (x == virtual_outgoing_args_rtx) | |
2831 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2832 | ||
2833 | if (new) | |
2834 | { | |
2835 | temp = plus_constant (new, offset); | |
2836 | if (!validate_change (object, loc, temp, 0)) | |
2837 | { | |
2838 | if (! extra_insns) | |
2839 | return 0; | |
2840 | ||
2841 | start_sequence (); | |
5f4f0e22 | 2842 | temp = force_operand (temp, NULL_RTX); |
6f086dfc RS |
2843 | seq = get_insns (); |
2844 | end_sequence (); | |
2845 | ||
2846 | emit_insns_before (seq, object); | |
2847 | if (! validate_change (object, loc, temp, 0) | |
2848 | && ! validate_replace_rtx (x, temp, object)) | |
2849 | abort (); | |
2850 | } | |
2851 | } | |
2852 | ||
2853 | return 1; | |
2854 | } | |
2855 | ||
2856 | /* Scan all subexpressions. */ | |
2857 | fmt = GET_RTX_FORMAT (code); | |
2858 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
2859 | if (*fmt == 'e') | |
2860 | { | |
2861 | if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns)) | |
2862 | return 0; | |
2863 | } | |
2864 | else if (*fmt == 'E') | |
2865 | for (j = 0; j < XVECLEN (x, i); j++) | |
2866 | if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object, | |
2867 | extra_insns)) | |
2868 | return 0; | |
2869 | ||
2870 | return 1; | |
2871 | } | |
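/* Editorial sketch (not part of the original source): the 'e'/'E'
   format-letter loop above is the standard idiom for recursing over an
   RTX.  A minimal stand-alone example of the same idiom, assuming only
   the rtl.h accessors already used in this file, is sketched below.  */
#if 0
static int
count_regs (x)
     register rtx x;
{
  register int i, j, count = 0;
  register char *fmt;

  if (x == 0)
    return 0;
  if (GET_CODE (x) == REG)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++, fmt++)
    if (*fmt == 'e')
      count += count_regs (XEXP (x, i));
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	count += count_regs (XVECEXP (x, i, j));
  return count;
}
#endif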
2872 | \f | |
2873 | /* Optimization: assuming this function does not receive nonlocal gotos, | |
2874 | delete the handlers for them, as well as the insns to establish | |
2875 | and disestablish them. */ | |
2876 | ||
2877 | static void | |
2878 | delete_handlers () | |
2879 | { | |
2880 | rtx insn; | |
2881 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2882 | { | |
2883 | /* Delete the handler by turning off the flag that would | |
2884 | prevent jump_optimize from deleting it. | |
2885 | Also permit deletion of the nonlocal labels themselves | |
2886 | if nothing local refers to them. */ | |
2887 | if (GET_CODE (insn) == CODE_LABEL) | |
2888 | LABEL_PRESERVE_P (insn) = 0; | |
2889 | if (GET_CODE (insn) == INSN | |
59257ff7 RK |
2890 | && ((nonlocal_goto_handler_slot != 0 |
2891 | && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn))) | |
2892 | || (nonlocal_goto_stack_level != 0 | |
2893 | && reg_mentioned_p (nonlocal_goto_stack_level, | |
2894 | PATTERN (insn))))) | |
6f086dfc RS |
2895 | delete_insn (insn); |
2896 | } | |
2897 | } | |
2898 | ||
2899 | /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels | |
2900 | of the current function. */ | |
2901 | ||
2902 | rtx | |
2903 | nonlocal_label_rtx_list () | |
2904 | { | |
2905 | tree t; | |
2906 | rtx x = 0; | |
2907 | ||
2908 | for (t = nonlocal_labels; t; t = TREE_CHAIN (t)) | |
2909 | x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x); | |
2910 | ||
2911 | return x; | |
2912 | } | |
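/* Editorial note (illustrative, not original to this file): for nonlocal
   labels a, b, c, in that order on the nonlocal_labels chain, the loop
   above builds

     (expr_list c (expr_list b (expr_list a nil)))

   so the resulting chain lists the labels in reverse of chain order.  */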
2913 | \f | |
2914 | /* Output a USE for any register use in RTL. | |
2915 | This is used with -noreg to mark the extent of the lifespan | |
2916 | of any registers used in a user-visible variable's DECL_RTL. */ | |
2917 | ||
2918 | void | |
2919 | use_variable (rtl) | |
2920 | rtx rtl; | |
2921 | { | |
2922 | if (GET_CODE (rtl) == REG) | |
2923 | /* This is a register variable. */ | |
2924 | emit_insn (gen_rtx (USE, VOIDmode, rtl)); | |
2925 | else if (GET_CODE (rtl) == MEM | |
2926 | && GET_CODE (XEXP (rtl, 0)) == REG | |
2927 | && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER | |
2928 | || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER) | |
2929 | && XEXP (rtl, 0) != current_function_internal_arg_pointer) | |
2930 | /* This is a variable-sized structure. */ | |
2931 | emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0))); | |
2932 | } | |
2933 | ||
2934 | /* Like use_variable except that it outputs the USEs after INSN | |
2935 | instead of at the end of the insn-chain. */ | |
2936 | ||
2937 | void | |
2938 | use_variable_after (rtl, insn) | |
2939 | rtx rtl, insn; | |
2940 | { | |
2941 | if (GET_CODE (rtl) == REG) | |
2942 | /* This is a register variable. */ | |
2943 | emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn); | |
2944 | else if (GET_CODE (rtl) == MEM | |
2945 | && GET_CODE (XEXP (rtl, 0)) == REG | |
2946 | && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER | |
2947 | || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER) | |
2948 | && XEXP (rtl, 0) != current_function_internal_arg_pointer) | |
2949 | /* This is a variable-sized structure. */ | |
2950 | emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn); | |
2951 | } | |
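/* Editorial note (illustrative): for a variable living in pseudo 42,
   use_variable emits (use (reg 42)); for a variable-sized structure whose
   DECL_RTL is (mem (reg 43)), it emits (use (reg 43)), keeping the address
   register live through the end of the variable's scope.  */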
2952 | \f | |
2953 | int | |
2954 | max_parm_reg_num () | |
2955 | { | |
2956 | return max_parm_reg; | |
2957 | } | |
2958 | ||
2959 | /* Return the first insn following those generated by `assign_parms'. */ | |
2960 | ||
2961 | rtx | |
2962 | get_first_nonparm_insn () | |
2963 | { | |
2964 | if (last_parm_insn) | |
2965 | return NEXT_INSN (last_parm_insn); | |
2966 | return get_insns (); | |
2967 | } | |
2968 | ||
5378192b RS |
2969 | /* Return the first NOTE_INSN_BLOCK_BEG note in the function. |
2970 | Crash if there is none. */ | |
2971 | ||
2972 | rtx | |
2973 | get_first_block_beg () | |
2974 | { | |
2975 | register rtx searcher; | |
2976 | register rtx insn = get_first_nonparm_insn (); | |
2977 | ||
2978 | for (searcher = insn; searcher; searcher = NEXT_INSN (searcher)) | |
2979 | if (GET_CODE (searcher) == NOTE | |
2980 | && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG) | |
2981 | return searcher; | |
2982 | ||
2983 | abort (); /* Invalid call to this function. (See comments above.) */ | |
2984 | return NULL_RTX; | |
2985 | } | |
2986 | ||
d181c154 RS |
2987 | /* Return 1 if EXP is an aggregate type (or a value with aggregate type). |
2988 | This means a type for which function calls must pass an address to the | |
2989 | function or get an address back from the function. | |
2990 | EXP may be a type node or an expression (whose type is tested). */ | |
6f086dfc RS |
2991 | |
2992 | int | |
2993 | aggregate_value_p (exp) | |
2994 | tree exp; | |
2995 | { | |
9d790a4f RS |
2996 | int i, regno, nregs; |
2997 | rtx reg; | |
d181c154 RS |
2998 | tree type; |
2999 | if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't') | |
3000 | type = exp; | |
3001 | else | |
3002 | type = TREE_TYPE (exp); | |
3003 | ||
3004 | if (RETURN_IN_MEMORY (type)) | |
6f086dfc RS |
3005 | return 1; |
3006 | if (flag_pcc_struct_return | |
d181c154 RS |
3007 | && (TREE_CODE (type) == RECORD_TYPE |
3008 | || TREE_CODE (type) == UNION_TYPE | |
3009 | || TREE_CODE (type) == QUAL_UNION_TYPE | |
3010 | || TREE_CODE (type) == ARRAY_TYPE)) | |
6f086dfc | 3011 | return 1; |
9d790a4f RS |
3012 | /* Make sure we have suitable call-clobbered regs to return |
3013 | the value in; if not, we must return it in memory. */ | |
d181c154 | 3014 | reg = hard_function_value (type, 0); |
9d790a4f | 3015 | regno = REGNO (reg); |
d181c154 | 3016 | nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type)); |
9d790a4f RS |
3017 | for (i = 0; i < nregs; i++) |
3018 | if (! call_used_regs[regno + i]) | |
3019 | return 1; | |
6f086dfc RS |
3020 | return 0; |
3021 | } | |
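/* Editorial sketch (hypothetical caller; the helper names are invented
   for illustration and do not exist in GCC).  */
#if 0
static void
example_pick_return_convention (fndecl)
     tree fndecl;
{
  if (aggregate_value_p (DECL_RESULT (fndecl)))
    /* The caller passes an address and the value is returned in memory.  */
    example_return_in_memory (fndecl);
  else
    /* The value comes back in call-clobbered hard registers.  */
    example_return_in_regs (fndecl);
}
#endif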
3022 | \f | |
3023 | /* Assign RTL expressions to the function's parameters. | |
3024 | This may involve copying them into registers and using | |
3025 | those registers as the RTL for them. | |
3026 | ||
3027 | If SECOND_TIME is non-zero it means that this function is being | |
3028 | called a second time. This is done by integrate.c when a function's | |
3029 | compilation is deferred. We need to come back here in case the | |
3030 | FUNCTION_ARG macro computes items needed for the rest of the compilation | |
3031 | (such as changing which registers are fixed or caller-saved). But suppress | |
3032 | writing any insns or setting DECL_RTL of anything in this case. */ | |
3033 | ||
3034 | void | |
3035 | assign_parms (fndecl, second_time) | |
3036 | tree fndecl; | |
3037 | int second_time; | |
3038 | { | |
3039 | register tree parm; | |
3040 | register rtx entry_parm = 0; | |
3041 | register rtx stack_parm = 0; | |
3042 | CUMULATIVE_ARGS args_so_far; | |
a53e14c0 | 3043 | enum machine_mode promoted_mode, passed_mode, nominal_mode; |
00d8a4c1 | 3044 | int unsignedp; |
6f086dfc RS |
3045 | /* Total space needed so far for args on the stack, |
3046 | given as a constant and a tree-expression. */ | |
3047 | struct args_size stack_args_size; | |
3048 | tree fntype = TREE_TYPE (fndecl); | |
3049 | tree fnargs = DECL_ARGUMENTS (fndecl); | |
3050 | /* This is used for the arg pointer when referring to stack args. */ | |
3051 | rtx internal_arg_pointer; | |
3052 | /* This is a dummy PARM_DECL that we used for the function result if | |
3053 | the function returns a structure. */ | |
3054 | tree function_result_decl = 0; | |
3055 | int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1; | |
3056 | int varargs_setup = 0; | |
3412b298 | 3057 | rtx conversion_insns = 0; |
de3ab9df RS |
3058 | /* FUNCTION_ARG may look at this variable. Since this is not |
3059 | expanding a call it will always be zero in this function. */ | |
3060 | int current_call_is_indirect = 0; | |
6f086dfc RS |
3061 | |
3062 | /* Nonzero if the last arg is named `__builtin_va_alist', | |
3063 | which is used on some machines for old-fashioned non-ANSI varargs.h; | |
3064 | this should be stuck onto the stack as if it had arrived there. */ | |
3065 | int vararg | |
3066 | = (fnargs | |
3067 | && (parm = tree_last (fnargs)) != 0 | |
3068 | && DECL_NAME (parm) | |
3069 | && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)), | |
3070 | "__builtin_va_alist"))); | |
3071 | ||
3072 | /* Nonzero if function takes extra anonymous args. | |
3073 | This means the last named arg must be on the stack | |
3074 | right before the anonymous ones. */ | |
3075 | int stdarg | |
3076 | = (TYPE_ARG_TYPES (fntype) != 0 | |
3077 | && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
3078 | != void_type_node)); | |
3079 | ||
3080 | /* If the reg that the virtual arg pointer will be translated into is | |
3081 | not a fixed reg or is the stack pointer, make a copy of the virtual | |
3082 | arg pointer, and address parms via the copy. The frame pointer is | |
3083 | considered fixed even though it is not marked as such. | |
3084 | ||
3085 | The second time through, simply use ap to avoid generating rtx. */ | |
3086 | ||
3087 | if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM | |
3088 | || ! (fixed_regs[ARG_POINTER_REGNUM] | |
3089 | || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)) | |
3090 | && ! second_time) | |
3091 | internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx); | |
3092 | else | |
3093 | internal_arg_pointer = virtual_incoming_args_rtx; | |
3094 | current_function_internal_arg_pointer = internal_arg_pointer; | |
3095 | ||
3096 | stack_args_size.constant = 0; | |
3097 | stack_args_size.var = 0; | |
3098 | ||
3099 | /* If struct value address is treated as the first argument, make it so. */ | |
3100 | if (aggregate_value_p (DECL_RESULT (fndecl)) | |
3101 | && ! current_function_returns_pcc_struct | |
3102 | && struct_value_incoming_rtx == 0) | |
3103 | { | |
3104 | tree type = build_pointer_type (fntype); | |
3105 | ||
5f4f0e22 | 3106 | function_result_decl = build_decl (PARM_DECL, NULL_TREE, type); |
6f086dfc RS |
3107 | |
3108 | DECL_ARG_TYPE (function_result_decl) = type; | |
3109 | TREE_CHAIN (function_result_decl) = fnargs; | |
3110 | fnargs = function_result_decl; | |
3111 | } | |
3112 | ||
3113 | parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
3114 | bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx)); | |
3115 | ||
3116 | #ifdef INIT_CUMULATIVE_INCOMING_ARGS | |
ea0d4c4b | 3117 | INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX); |
6f086dfc | 3118 | #else |
ea0d4c4b | 3119 | INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX); |
6f086dfc RS |
3120 | #endif |
3121 | ||
3122 | /* We haven't yet found an argument that we must push and pretend the | |
3123 | caller did. */ | |
3124 | current_function_pretend_args_size = 0; | |
3125 | ||
3126 | for (parm = fnargs; parm; parm = TREE_CHAIN (parm)) | |
3127 | { | |
3128 | int aggregate | |
3129 | = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE | |
3130 | || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE | |
c1b98a95 RK |
3131 | || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE |
3132 | || TREE_CODE (TREE_TYPE (parm)) == QUAL_UNION_TYPE); | |
6f086dfc RS |
3133 | struct args_size stack_offset; |
3134 | struct args_size arg_size; | |
3135 | int passed_pointer = 0; | |
3136 | tree passed_type = DECL_ARG_TYPE (parm); | |
3137 | ||
3138 | /* Set LAST_NAMED if this is the last named arg before some | |
3139 | anonymous args. We treat it as if it were anonymous too. */ | |
3140 | int last_named = ((TREE_CHAIN (parm) == 0 | |
3141 | || DECL_NAME (TREE_CHAIN (parm)) == 0) | |
3142 | && (vararg || stdarg)); | |
3143 | ||
3144 | if (TREE_TYPE (parm) == error_mark_node | |
3145 | /* This can happen after weird syntax errors | |
3146 | or if an enum type is defined among the parms. */ | |
3147 | || TREE_CODE (parm) != PARM_DECL | |
3148 | || passed_type == NULL) | |
3149 | { | |
587cb682 TW |
3150 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode, |
3151 | const0_rtx); | |
6f086dfc RS |
3152 | TREE_USED (parm) = 1; |
3153 | continue; | |
3154 | } | |
3155 | ||
3156 | /* For varargs.h function, save info about regs and stack space | |
3157 | used by the individual args, not including the va_alist arg. */ | |
3158 | if (vararg && last_named) | |
3159 | current_function_args_info = args_so_far; | |
3160 | ||
3161 | /* Find mode of arg as it is passed, and mode of arg | |
3162 | as it should be during execution of this function. */ | |
3163 | passed_mode = TYPE_MODE (passed_type); | |
3164 | nominal_mode = TYPE_MODE (TREE_TYPE (parm)); | |
3165 | ||
16bae307 RS |
3166 | /* If the parm's mode is VOID, its value doesn't matter, |
3167 | so avoid the usual things like emit_move_insn that could crash. | |
3168 | if (nominal_mode == VOIDmode) | |
3169 | { | |
3170 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx; | |
3171 | continue; | |
3172 | } | |
3173 | ||
a14ae508 RK |
3174 | /* See if this arg was passed by invisible reference. It is if |
3175 | it is an object whose size depends on the contents of the | |
3176 | object itself or if the machine requires these objects be passed | |
3177 | that way. */ | |
3178 | ||
3179 | if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST | |
3180 | && contains_placeholder_p (TYPE_SIZE (passed_type))) | |
6f086dfc | 3181 | #ifdef FUNCTION_ARG_PASS_BY_REFERENCE |
a14ae508 RK |
3182 | || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode, |
3183 | passed_type, ! last_named) | |
3184 | #endif | |
3185 | ) | |
6f086dfc RS |
3186 | { |
3187 | passed_type = build_pointer_type (passed_type); | |
3188 | passed_pointer = 1; | |
3189 | passed_mode = nominal_mode = Pmode; | |
3190 | } | |
6f086dfc | 3191 | |
a53e14c0 RK |
3192 | promoted_mode = passed_mode; |
3193 | ||
3194 | #ifdef PROMOTE_FUNCTION_ARGS | |
3195 | /* Compute the mode to which the arg is actually extended. */ | |
3196 | if (TREE_CODE (passed_type) == INTEGER_TYPE | |
3197 | || TREE_CODE (passed_type) == ENUMERAL_TYPE | |
3198 | || TREE_CODE (passed_type) == BOOLEAN_TYPE | |
3199 | || TREE_CODE (passed_type) == CHAR_TYPE | |
3200 | || TREE_CODE (passed_type) == REAL_TYPE | |
3201 | || TREE_CODE (passed_type) == POINTER_TYPE | |
3202 | || TREE_CODE (passed_type) == OFFSET_TYPE) | |
3203 | { | |
3204 | unsignedp = TREE_UNSIGNED (passed_type); | |
3205 | PROMOTE_MODE (promoted_mode, unsignedp, passed_type); | |
3206 | } | |
3207 | #endif | |
3208 | ||
6f086dfc RS |
3209 | /* Let machine desc say which reg (if any) the parm arrives in. |
3210 | 0 means it arrives on the stack. */ | |
3211 | #ifdef FUNCTION_INCOMING_ARG | |
a53e14c0 | 3212 | entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode, |
6f086dfc RS |
3213 | passed_type, ! last_named); |
3214 | #else | |
a53e14c0 | 3215 | entry_parm = FUNCTION_ARG (args_so_far, promoted_mode, |
6f086dfc RS |
3216 | passed_type, ! last_named); |
3217 | #endif | |
3218 | ||
a53e14c0 RK |
3219 | if (entry_parm) |
3220 | passed_mode = promoted_mode; | |
3221 | ||
6f086dfc RS |
3222 | #ifdef SETUP_INCOMING_VARARGS |
3223 | /* If this is the last named parameter, do any required setup for | |
3224 | varargs or stdargs. We need to know about the case of this being an | |
3225 | addressable type, in which case we skip the registers it | |
3226 | would have arrived in. | |
3227 | ||
3228 | For stdargs, LAST_NAMED will be set for two parameters, the one that | |
3229 | is actually the last named, and the dummy parameter. We only | |
3230 | want to do this action once. | |
3231 | ||
3232 | Also, indicate when RTL generation is to be suppressed. */ | |
3233 | if (last_named && !varargs_setup) | |
3234 | { | |
3235 | SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type, | |
3236 | current_function_pretend_args_size, | |
3237 | second_time); | |
3238 | varargs_setup = 1; | |
3239 | } | |
3240 | #endif | |
3241 | ||
3242 | /* Determine parm's home in the stack, | |
3243 | in case it arrives in the stack or we should pretend it did. | |
3244 | ||
3245 | Compute the stack position and rtx where the argument arrives | |
3246 | and its size. | |
3247 | ||
3248 | There is one complexity here: If this was a parameter that would | |
3249 | have been passed in registers, but wasn't only because it is | |
3250 | __builtin_va_alist, we want locate_and_pad_parm to treat it as if | |
3251 | it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | |
3252 | In this case, we call FUNCTION_ARG with NAMED set to 1 instead of | |
3253 | 0 as it was the previous time. */ | |
3254 | ||
3255 | locate_and_pad_parm (passed_mode, passed_type, | |
3256 | #ifdef STACK_PARMS_IN_REG_PARM_AREA | |
3257 | 1, | |
3258 | #else | |
3259 | #ifdef FUNCTION_INCOMING_ARG | |
3260 | FUNCTION_INCOMING_ARG (args_so_far, passed_mode, | |
3261 | passed_type, | |
3262 | (! last_named | |
3263 | || varargs_setup)) != 0, | |
3264 | #else | |
3265 | FUNCTION_ARG (args_so_far, passed_mode, | |
3266 | passed_type, | |
3267 | ! last_named || varargs_setup) != 0, | |
3268 | #endif | |
3269 | #endif | |
3270 | fndecl, &stack_args_size, &stack_offset, &arg_size); | |
3271 | ||
3272 | if (! second_time) | |
3273 | { | |
3274 | rtx offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
3275 | ||
3276 | if (offset_rtx == const0_rtx) | |
3277 | stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer); | |
3278 | else | |
3279 | stack_parm = gen_rtx (MEM, passed_mode, | |
3280 | gen_rtx (PLUS, Pmode, | |
3281 | internal_arg_pointer, offset_rtx)); | |
3282 | ||
3283 | /* If this is a memory ref that contains aggregate components, | |
3284 | mark it as such for cse and loop optimize. */ | |
3285 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3286 | } | |
3287 | ||
3288 | /* If this parameter was passed both in registers and in the stack, | |
3289 | use the copy on the stack. */ | |
3290 | if (MUST_PASS_IN_STACK (passed_mode, passed_type)) | |
3291 | entry_parm = 0; | |
3292 | ||
461beb10 | 3293 | #ifdef FUNCTION_ARG_PARTIAL_NREGS |
6f086dfc RS |
3294 | /* If this parm was passed part in regs and part in memory, |
3295 | pretend it arrived entirely in memory | |
3296 | by pushing the register-part onto the stack. | |
3297 | ||
3298 | In the special case of a DImode or DFmode that is split, | |
3299 | we could put it together in a pseudoreg directly, | |
3300 | but for now that's not worth bothering with. */ | |
3301 | ||
3302 | if (entry_parm) | |
3303 | { | |
461beb10 DE |
3304 | int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode, |
3305 | passed_type, ! last_named); | |
6f086dfc RS |
3306 | |
3307 | if (nregs > 0) | |
3308 | { | |
3309 | current_function_pretend_args_size | |
3310 | = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1) | |
3311 | / (PARM_BOUNDARY / BITS_PER_UNIT) | |
3312 | * (PARM_BOUNDARY / BITS_PER_UNIT)); | |
3313 | ||
3314 | if (! second_time) | |
3315 | move_block_from_reg (REGNO (entry_parm), | |
f31870c1 JW |
3316 | validize_mem (stack_parm), nregs, |
3317 | int_size_in_bytes (TREE_TYPE (parm))); | |
6f086dfc RS |
3318 | entry_parm = stack_parm; |
3319 | } | |
3320 | } | |
461beb10 | 3321 | #endif |
6f086dfc RS |
3322 | |
3323 | /* If we didn't decide this parm came in a register, | |
3324 | by default it came on the stack. */ | |
3325 | if (entry_parm == 0) | |
3326 | entry_parm = stack_parm; | |
3327 | ||
3328 | /* Record permanently how this parm was passed. */ | |
3329 | if (! second_time) | |
3330 | DECL_INCOMING_RTL (parm) = entry_parm; | |
3331 | ||
3332 | /* If there is actually space on the stack for this parm, | |
3333 | count it in stack_args_size; otherwise set stack_parm to 0 | |
3334 | to indicate there is no preallocated stack slot for the parm. */ | |
3335 | ||
3336 | if (entry_parm == stack_parm | |
d9ca49d5 | 3337 | #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE) |
6f086dfc | 3338 | /* On some machines, even if a parm value arrives in a register |
d9ca49d5 JW |
3339 | there is still an (uninitialized) stack slot allocated for it. |
3340 | ||
3341 | ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell | |
3342 | whether this parameter already has a stack slot allocated, | |
3343 | because an arg block exists only if current_function_args_size | |
3344 | is larger than some threshold, and we haven't calculated that | |
3345 | yet. So, for now, we just assume that stack slots never exist | |
3346 | in this case. */ | |
6f086dfc RS |
3347 | || REG_PARM_STACK_SPACE (fndecl) > 0 |
3348 | #endif | |
3349 | ) | |
3350 | { | |
3351 | stack_args_size.constant += arg_size.constant; | |
3352 | if (arg_size.var) | |
3353 | ADD_PARM_SIZE (stack_args_size, arg_size.var); | |
3354 | } | |
3355 | else | |
3356 | /* No stack slot was pushed for this parm. */ | |
3357 | stack_parm = 0; | |
3358 | ||
3359 | /* Update info on where next arg arrives in registers. */ | |
3360 | ||
3361 | FUNCTION_ARG_ADVANCE (args_so_far, passed_mode, | |
3362 | passed_type, ! last_named); | |
3363 | ||
3364 | /* If this is our second time through, we are done with this parm. */ | |
3365 | if (second_time) | |
3366 | continue; | |
3367 | ||
e16c591a RS |
3368 | /* If we can't trust the parm stack slot to be aligned enough |
3369 | for its ultimate type, don't use that slot after entry. | |
3370 | We'll make another stack slot, if we need one. */ | |
3371 | { | |
e16c591a RS |
3372 | int thisparm_boundary |
3373 | = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type); | |
e16c591a RS |
3374 | |
3375 | if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary) | |
3376 | stack_parm = 0; | |
3377 | } | |
3378 | ||
cb61f66f RS |
3379 | /* If parm was passed in memory, and we need to convert it on entry, |
3380 | don't store it back in that same slot. */ | |
3381 | if (entry_parm != 0 | |
3382 | && nominal_mode != BLKmode && nominal_mode != passed_mode) | |
3383 | stack_parm = 0; | |
3384 | ||
3385 | #if 0 | |
6f086dfc RS |
3386 | /* Now adjust STACK_PARM to the mode and precise location |
3387 | where this parameter should live during execution, | |
3388 | if we discover that it must live in the stack during execution. | |
3389 | To make debuggers happier on big-endian machines, we store | |
3390 | the value in the last bytes of the space available. */ | |
3391 | ||
3392 | if (nominal_mode != BLKmode && nominal_mode != passed_mode | |
3393 | && stack_parm != 0) | |
3394 | { | |
3395 | rtx offset_rtx; | |
3396 | ||
3397 | #if BYTES_BIG_ENDIAN | |
3398 | if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD) | |
3399 | stack_offset.constant += (GET_MODE_SIZE (passed_mode) | |
3400 | - GET_MODE_SIZE (nominal_mode)); | |
3401 | #endif | |
3402 | ||
3403 | offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
3404 | if (offset_rtx == const0_rtx) | |
3405 | stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer); | |
3406 | else | |
3407 | stack_parm = gen_rtx (MEM, nominal_mode, | |
3408 | gen_rtx (PLUS, Pmode, | |
3409 | internal_arg_pointer, offset_rtx)); | |
3410 | ||
3411 | /* If this is a memory ref that contains aggregate components, | |
3412 | mark it as such for cse and loop optimize. */ | |
3413 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3414 | } | |
cb61f66f | 3415 | #endif /* 0 */ |
6f086dfc RS |
3416 | |
3417 | /* ENTRY_PARM is an RTX for the parameter as it arrives, | |
3418 | in the mode in which it arrives. | |
3419 | STACK_PARM is an RTX for a stack slot where the parameter can live | |
3420 | during the function (in case we want to put it there). | |
3421 | STACK_PARM is 0 if no stack slot was pushed for it. | |
3422 | ||
3423 | Now output code if necessary to convert ENTRY_PARM to | |
3424 | the type in which this function declares it, | |
3425 | and store that result in an appropriate place, | |
3426 | which may be a pseudo reg, may be STACK_PARM, | |
3427 | or may be a local stack slot if STACK_PARM is 0. | |
3428 | ||
3429 | Set DECL_RTL to that place. */ | |
3430 | ||
3431 | if (nominal_mode == BLKmode) | |
3432 | { | |
3433 | /* If a BLKmode arrives in registers, copy it to a stack slot. */ | |
3434 | if (GET_CODE (entry_parm) == REG) | |
3435 | { | |
3436 | int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)), | |
3437 | UNITS_PER_WORD); | |
3438 | ||
3439 | /* Note that we will be storing an integral number of words. | |
3440 | So we have to be careful to ensure that we allocate an | |
3441 | integral number of words. We do this below in the | |
3442 | assign_stack_local if space was not allocated in the argument | |
3443 | list. If it was, this will not work if PARM_BOUNDARY is not | |
3444 | a multiple of BITS_PER_WORD. It isn't clear how to fix this | |
3445 | if it becomes a problem. */ | |
3446 | ||
3447 | if (stack_parm == 0) | |
7e41ffa2 RS |
3448 | { |
3449 | stack_parm | |
3450 | = assign_stack_local (GET_MODE (entry_parm), size_stored, 0); | |
3451 | /* If this is a memory ref that contains aggregate components, | |
3452 | mark it as such for cse and loop optimize. */ | |
3453 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3454 | } | |
3455 | ||
6f086dfc RS |
3456 | else if (PARM_BOUNDARY % BITS_PER_WORD != 0) |
3457 | abort (); | |
3458 | ||
3459 | move_block_from_reg (REGNO (entry_parm), | |
3460 | validize_mem (stack_parm), | |
f31870c1 JW |
3461 | size_stored / UNITS_PER_WORD, |
3462 | int_size_in_bytes (TREE_TYPE (parm))); | |
6f086dfc RS |
3463 | } |
3464 | DECL_RTL (parm) = stack_parm; | |
3465 | } | |
74bd77a8 | 3466 | else if (! ((obey_regdecls && ! DECL_REGISTER (parm) |
a82ad570 | 3467 | && ! DECL_INLINE (fndecl)) |
6f086dfc RS |
3468 | /* layout_decl may set this. */ |
3469 | || TREE_ADDRESSABLE (parm) | |
3470 | || TREE_SIDE_EFFECTS (parm) | |
3471 | /* If -ffloat-store specified, don't put explicit | |
3472 | float variables into registers. */ | |
3473 | || (flag_float_store | |
3474 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)) | |
3475 | /* Always assign pseudo to structure return or item passed | |
3476 | by invisible reference. */ | |
3477 | || passed_pointer || parm == function_result_decl) | |
3478 | { | |
00d8a4c1 RK |
3479 | /* Store the parm in a pseudoregister during the function, but we |
3480 | may need to do it in a wider mode. */ | |
3481 | ||
3482 | register rtx parmreg; | |
14aceb29 | 3483 | int regno; |
00d8a4c1 RK |
3484 | |
3485 | unsignedp = TREE_UNSIGNED (TREE_TYPE (parm)); | |
3486 | if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE | |
3487 | || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE | |
3488 | || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE | |
3489 | || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE | |
3490 | || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE | |
3491 | || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE | |
3492 | || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE) | |
3493 | { | |
3494 | PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm)); | |
3495 | } | |
6f086dfc | 3496 | |
00d8a4c1 | 3497 | parmreg = gen_reg_rtx (nominal_mode); |
6f086dfc RS |
3498 | REG_USERVAR_P (parmreg) = 1; |
3499 | ||
3500 | /* If this was an item that we received a pointer to, set DECL_RTL | |
3501 | appropriately. */ | |
3502 | if (passed_pointer) | |
3503 | { | |
3504 | DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg); | |
3505 | MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate; | |
3506 | } | |
3507 | else | |
3508 | DECL_RTL (parm) = parmreg; | |
3509 | ||
3510 | /* Copy the value into the register. */ | |
3511 | if (GET_MODE (parmreg) != GET_MODE (entry_parm)) | |
86f8eff3 RK |
3512 | { |
3513 | /* If ENTRY_PARM is a hard register, it might be in a register | |
3514 | not valid for operating in its mode (e.g., an odd-numbered | |
3515 | register for a DFmode). In that case, moves are the only | |
3516 | thing valid, so we can't do a convert from there. This | |
3517 | occurs when the calling sequence allows such misaligned | |
3412b298 JW |
3518 | usages. |
3519 | ||
3520 | In addition, the conversion may involve a call, which could | |
3521 | clobber parameters which haven't been copied to pseudo | |
3522 | registers yet. Therefore, we must first copy the parm to | |
3523 | a pseudo reg here, and save the conversion until after all | |
3524 | parameters have been moved. */ | |
3525 | ||
3526 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
3527 | ||
3528 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
3529 | ||
3530 | push_to_sequence (conversion_insns); | |
8c394878 | 3531 | convert_move (parmreg, tempreg, unsignedp); |
3412b298 JW |
3532 | conversion_insns = get_insns (); |
3533 | end_sequence (); | |
86f8eff3 | 3534 | } |
6f086dfc RS |
3535 | else |
3536 | emit_move_insn (parmreg, validize_mem (entry_parm)); | |
3537 | ||
74bd77a8 RS |
3538 | /* If we were passed a pointer but the actual value |
3539 | can safely live in a register, put it in one. */ | |
16bae307 | 3540 | if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode |
74bd77a8 RS |
3541 | && ! ((obey_regdecls && ! DECL_REGISTER (parm) |
3542 | && ! DECL_INLINE (fndecl)) | |
3543 | /* layout_decl may set this. */ | |
3544 | || TREE_ADDRESSABLE (parm) | |
3545 | || TREE_SIDE_EFFECTS (parm) | |
3546 | /* If -ffloat-store specified, don't put explicit | |
3547 | float variables into registers. */ | |
3548 | || (flag_float_store | |
3549 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))) | |
3550 | { | |
2654605a JW |
3551 | /* We can't use nominal_mode, because it will have been set to |
3552 | Pmode above. We must use the actual mode of the parm. */ | |
3553 | parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); | |
74bd77a8 RS |
3554 | emit_move_insn (parmreg, DECL_RTL (parm)); |
3555 | DECL_RTL (parm) = parmreg; | |
c110c53d RS |
3556 | /* STACK_PARM is the pointer, not the parm, and PARMREG is |
3557 | now the parm. */ | |
3558 | stack_parm = 0; | |
74bd77a8 | 3559 | } |
137a2a7b DE |
3560 | #ifdef FUNCTION_ARG_CALLEE_COPIES |
3561 | /* If we are passed an arg by reference and it is our responsibility | |
3562 | to make a copy, do it now. | |
3563 | PASSED_TYPE and PASSED_MODE now refer to the pointer, not the | |
3564 | original argument, so we must recreate them in the call to | |
3565 | FUNCTION_ARG_CALLEE_COPIES. */ | |
3566 | /* ??? Later add code to handle the case where, if the argument isn't | |
3567 | modified, don't do the copy. */ | |
3568 | ||
3569 | else if (passed_pointer | |
3570 | && FUNCTION_ARG_CALLEE_COPIES (args_so_far, | |
3571 | TYPE_MODE (DECL_ARG_TYPE (parm)), | |
3572 | DECL_ARG_TYPE (parm), | |
3573 | ! last_named)) | |
3574 | { | |
3575 | rtx copy; | |
3576 | tree type = DECL_ARG_TYPE (parm); | |
3577 | ||
3578 | /* This sequence may involve a library call perhaps clobbering | |
3579 | registers that haven't been copied to pseudos yet. */ | |
3580 | ||
3581 | push_to_sequence (conversion_insns); | |
3582 | ||
3583 | if (TYPE_SIZE (type) == 0 | |
3584 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) | |
3585 | { | |
3586 | /* This is a variable-sized object. */ | |
3587 | /* ??? Can we use expr_size here? */ | |
3588 | rtx size_rtx = expand_expr (size_in_bytes (type), NULL_RTX, | |
49640f91 | 3589 | TYPE_MODE (sizetype), 0); |
137a2a7b DE |
3590 | |
3591 | copy = gen_rtx (MEM, BLKmode, | |
3592 | allocate_dynamic_stack_space (size_rtx, NULL_RTX, | |
3593 | TYPE_ALIGN (type))); | |
3594 | } | |
3595 | else | |
3596 | { | |
3597 | int size = int_size_in_bytes (type); | |
0b2187a9 | 3598 | copy = assign_stack_temp (TYPE_MODE (type), size, 1); |
137a2a7b DE |
3599 | } |
3600 | ||
3601 | store_expr (parm, copy, 0); | |
3602 | emit_move_insn (parmreg, XEXP (copy, 0)); | |
3603 | conversion_insns = get_insns (); | |
3604 | end_sequence (); | |
3605 | } | |
3606 | #endif /* FUNCTION_ARG_CALLEE_COPIES */ | |
74bd77a8 | 3607 | |
6f086dfc | 3608 | /* In any case, record the parm's desired stack location |
14aceb29 RS |
3609 | in case we later discover it must live in the stack. |
3610 | ||
3611 | If it is a COMPLEX value, store the stack location for both | |
3612 | halves. */ | |
3613 | ||
3614 | if (GET_CODE (parmreg) == CONCAT) | |
3615 | regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1))); | |
3616 | else | |
3617 | regno = REGNO (parmreg); | |
3618 | ||
3619 | if (regno >= nparmregs) | |
6f086dfc RS |
3620 | { |
3621 | rtx *new; | |
19fdd3ee | 3622 | int old_nparmregs = nparmregs; |
14aceb29 RS |
3623 | |
3624 | nparmregs = regno + 5; | |
6f086dfc | 3625 | new = (rtx *) oballoc (nparmregs * sizeof (rtx)); |
19fdd3ee | 3626 | bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx)); |
14aceb29 RS |
3627 | bzero (new + old_nparmregs, |
3628 | (nparmregs - old_nparmregs) * sizeof (rtx)); | |
6f086dfc RS |
3629 | parm_reg_stack_loc = new; |
3630 | } | |
14aceb29 RS |
3631 | |
3632 | if (GET_CODE (parmreg) == CONCAT) | |
3633 | { | |
3634 | enum machine_mode submode = GET_MODE (XEXP (parmreg, 0)); | |
3635 | ||
7b1a0c14 RS |
3636 | if (stack_parm != 0) |
3637 | { | |
3d329b07 RK |
3638 | parm_reg_stack_loc[REGNO (gen_realpart (submode, parmreg))] |
3639 | = gen_realpart (submode, stack_parm); | |
3640 | parm_reg_stack_loc[REGNO (gen_imagpart (submode, parmreg))] | |
3641 | = gen_imagpart (submode, stack_parm); | |
7b1a0c14 RS |
3642 | } |
3643 | else | |
3644 | { | |
3d329b07 | 3645 | parm_reg_stack_loc[REGNO (gen_realpart (submode, parmreg))] |
7b1a0c14 | 3646 | = 0; |
3d329b07 | 3647 | parm_reg_stack_loc[REGNO (gen_imagpart (submode, parmreg))] |
7b1a0c14 RS |
3648 | = 0; |
3649 | } | |
14aceb29 RS |
3650 | } |
3651 | else | |
3652 | parm_reg_stack_loc[REGNO (parmreg)] = stack_parm; | |
6f086dfc RS |
3653 | |
3654 | /* Mark the register as eliminable if we did no conversion | |
3655 | and it was copied from memory at a fixed offset, | |
3656 | and the arg pointer was not copied to a pseudo-reg. | |
3657 | If the arg pointer is a pseudo reg or the offset formed | |
3658 | an invalid address, such memory-equivalences | |
3659 | as we make here would screw up life analysis for it. */ | |
3660 | if (nominal_mode == passed_mode | |
3661 | && GET_CODE (entry_parm) == MEM | |
e16c591a | 3662 | && entry_parm == stack_parm |
6f086dfc RS |
3663 | && stack_offset.var == 0 |
3664 | && reg_mentioned_p (virtual_incoming_args_rtx, | |
3665 | XEXP (entry_parm, 0))) | |
3666 | REG_NOTES (get_last_insn ()) | |
3667 | = gen_rtx (EXPR_LIST, REG_EQUIV, | |
3668 | entry_parm, REG_NOTES (get_last_insn ())); | |
3669 | ||
3670 | /* For pointer data type, suggest pointer register. */ | |
3671 | if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE) | |
3672 | mark_reg_pointer (parmreg); | |
3673 | } | |
3674 | else | |
3675 | { | |
3676 | /* Value must be stored in the stack slot STACK_PARM | |
3677 | during function execution. */ | |
3678 | ||
3679 | if (passed_mode != nominal_mode) | |
86f8eff3 RK |
3680 | { |
3681 | /* Conversion is required. */ | |
3412b298 JW |
3682 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); |
3683 | ||
3684 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
86f8eff3 | 3685 | |
3412b298 JW |
3686 | push_to_sequence (conversion_insns); |
3687 | entry_parm = convert_to_mode (nominal_mode, tempreg, | |
a53e14c0 | 3688 | TREE_UNSIGNED (TREE_TYPE (parm))); |
3412b298 JW |
3689 | conversion_insns = get_insns (); |
3690 | end_sequence (); | |
86f8eff3 | 3691 | } |
6f086dfc RS |
3692 | |
3693 | if (entry_parm != stack_parm) | |
3694 | { | |
3695 | if (stack_parm == 0) | |
7e41ffa2 RS |
3696 | { |
3697 | stack_parm | |
3698 | = assign_stack_local (GET_MODE (entry_parm), | |
3699 | GET_MODE_SIZE (GET_MODE (entry_parm)), 0); | |
3700 | /* If this is a memory ref that contains aggregate components, | |
3701 | mark it as such for cse and loop optimize. */ | |
3702 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3703 | } | |
3704 | ||
3412b298 JW |
3705 | if (passed_mode != nominal_mode) |
3706 | { | |
3707 | push_to_sequence (conversion_insns); | |
3708 | emit_move_insn (validize_mem (stack_parm), | |
3709 | validize_mem (entry_parm)); | |
3710 | conversion_insns = get_insns (); | |
3711 | end_sequence (); | |
3712 | } | |
3713 | else | |
3714 | emit_move_insn (validize_mem (stack_parm), | |
3715 | validize_mem (entry_parm)); | |
6f086dfc RS |
3716 | } |
3717 | ||
3718 | DECL_RTL (parm) = stack_parm; | |
3719 | } | |
3720 | ||
3721 | /* If this "parameter" was the place where we are receiving the | |
3722 | function's incoming structure pointer, set up the result. */ | |
3723 | if (parm == function_result_decl) | |
ccdecf58 RK |
3724 | { |
3725 | tree result = DECL_RESULT (fndecl); | |
3726 | tree restype = TREE_TYPE (result); | |
3727 | ||
3728 | DECL_RTL (result) | |
3729 | = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm)); | |
3730 | ||
3731 | MEM_IN_STRUCT_P (DECL_RTL (result)) | |
3732 | = (TREE_CODE (restype) == RECORD_TYPE | |
3733 | || TREE_CODE (restype) == UNION_TYPE | |
3734 | || TREE_CODE (restype) == QUAL_UNION_TYPE | |
3735 | || TREE_CODE (restype) == ARRAY_TYPE); | |
3736 | } | |
6f086dfc RS |
3737 | |
3738 | if (TREE_THIS_VOLATILE (parm)) | |
3739 | MEM_VOLATILE_P (DECL_RTL (parm)) = 1; | |
3740 | if (TREE_READONLY (parm)) | |
3741 | RTX_UNCHANGING_P (DECL_RTL (parm)) = 1; | |
3742 | } | |
3743 | ||
3412b298 JW |
3744 | /* Output all parameter conversion instructions (possibly including calls) |
3745 | now that all parameters have been copied out of hard registers. */ | |
3746 | emit_insns (conversion_insns); | |
3747 | ||
6f086dfc RS |
3748 | max_parm_reg = max_reg_num (); |
3749 | last_parm_insn = get_last_insn (); | |
3750 | ||
3751 | current_function_args_size = stack_args_size.constant; | |
3752 | ||
3753 | /* Adjust function incoming argument size for alignment and | |
3754 | minimum length. */ | |
3755 | ||
3756 | #ifdef REG_PARM_STACK_SPACE | |
6f90e075 | 3757 | #ifndef MAYBE_REG_PARM_STACK_SPACE |
6f086dfc RS |
3758 | current_function_args_size = MAX (current_function_args_size, |
3759 | REG_PARM_STACK_SPACE (fndecl)); | |
3760 | #endif | |
6f90e075 | 3761 | #endif |
6f086dfc RS |
3762 | |
3763 | #ifdef STACK_BOUNDARY | |
3764 | #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT) | |
3765 | ||
3766 | current_function_args_size | |
3767 | = ((current_function_args_size + STACK_BYTES - 1) | |
3768 | / STACK_BYTES) * STACK_BYTES; | |
3769 | #endif | |
3770 | ||
3771 | #ifdef ARGS_GROW_DOWNWARD | |
3772 | current_function_arg_offset_rtx | |
5f4f0e22 | 3773 | = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant) |
6f086dfc RS |
3774 | : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var, |
3775 | size_int (-stack_args_size.constant)), | |
5f4f0e22 | 3776 | NULL_RTX, VOIDmode, 0)); |
6f086dfc RS |
3777 | #else |
3778 | current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size); | |
3779 | #endif | |
3780 | ||
3781 | /* See how many bytes, if any, of its args a function should try to pop | |
3782 | on return. */ | |
3783 | ||
3784 | current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl), | |
3785 | current_function_args_size); | |
3786 | ||
3787 | /* For stdarg.h function, save info about regs and stack space | |
3788 | used by the named args. */ | |
3789 | ||
3790 | if (stdarg) | |
3791 | current_function_args_info = args_so_far; | |
3792 | ||
3793 | /* Set the rtx used for the function return value. Put this in its | |
3794 | own variable so any optimizers that need this information don't have | |
3795 | to include tree.h. Do this here so it gets done when an inlined | |
3796 | function gets output. */ | |
3797 | ||
3798 | current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl)); | |
3799 | } | |
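/* Editorial sketch (hypothetical driver, not in this file): per the
   comment before assign_parms, a deferred compilation calls it twice,
   the second time only to recompute FUNCTION_ARG bookkeeping.  */
#if 0
  assign_parms (fndecl, 0);	/* first pass: emit insns, set DECL_RTL */
  /* ... compilation deferred, then resumed by integrate.c ... */
  assign_parms (fndecl, 1);	/* second pass: no insns, no DECL_RTL */
#endif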
3800 | \f | |
75dc3319 RK |
3801 | /* Indicate whether REGNO is an incoming argument to the current function |
3802 | that was promoted to a wider mode. If so, return the RTX for the | |
3803 | register (to get its mode). PMODE and PUNSIGNEDP are set to the mode | |
3804 | that REGNO is promoted from and whether the promotion was signed or | |
3805 | unsigned. */ | |
3806 | ||
3807 | #ifdef PROMOTE_FUNCTION_ARGS | |
3808 | ||
3809 | rtx | |
3810 | promoted_input_arg (regno, pmode, punsignedp) | |
3811 | int regno; | |
3812 | enum machine_mode *pmode; | |
3813 | int *punsignedp; | |
3814 | { | |
3815 | tree arg; | |
3816 | ||
3817 | for (arg = DECL_ARGUMENTS (current_function_decl); arg; | |
3818 | arg = TREE_CHAIN (arg)) | |
3819 | if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG | |
3820 | && REGNO (DECL_INCOMING_RTL (arg)) == regno | |
3821 | && (TREE_CODE (TREE_TYPE (arg)) == INTEGER_TYPE | |
3822 | || TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE | |
3823 | || TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE | |
3824 | || TREE_CODE (TREE_TYPE (arg)) == CHAR_TYPE | |
3825 | || TREE_CODE (TREE_TYPE (arg)) == REAL_TYPE | |
3826 | || TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE | |
3827 | || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE)) | |
3828 | { | |
3829 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg)); | |
3830 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg)); | |
3831 | ||
3832 | PROMOTE_MODE (mode, unsignedp, TREE_TYPE (arg)); | |
3833 | if (mode == GET_MODE (DECL_INCOMING_RTL (arg)) | |
3834 | && mode != DECL_MODE (arg)) | |
3835 | { | |
3836 | *pmode = DECL_MODE (arg); | |
3837 | *punsignedp = unsignedp; | |
3838 | return DECL_INCOMING_RTL (arg); | |
3839 | } | |
3840 | } | |
3841 | ||
3842 | return 0; | |
3843 | } | |
3844 | ||
3845 | #endif | |
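/* Editorial sketch (illustrative use; note_promoted_incoming_arg is an
   invented name): a pass that finds hard register REGNO live on entry
   can ask whether it carries a promoted argument.  */
#if 0
  enum machine_mode mode;
  int unsignedp;
  rtx reg = promoted_input_arg (regno, &mode, &unsignedp);

  if (reg != 0)
    /* REGNO arrived extended from MODE; UNSIGNEDP says whether the
       extension was zero- or sign-extension.  */
    note_promoted_incoming_arg (reg, mode, unsignedp);
#endif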
3846 | \f | |
6f086dfc RS |
3847 | /* Compute the size and offset from the start of the stacked arguments for a |
3848 | parm passed in mode PASSED_MODE and with type TYPE. | |
3849 | ||
3850 | INITIAL_OFFSET_PTR points to the current offset into the stacked | |
3851 | arguments. | |
3852 | ||
3853 | The starting offset and size for this parm are returned in *OFFSET_PTR | |
3854 | and *ARG_SIZE_PTR, respectively. | |
3855 | ||
3856 | IN_REGS is non-zero if the argument will be passed in registers. It will | |
3857 | never be set if REG_PARM_STACK_SPACE is not defined. | |
3858 | ||
3859 | FNDECL is the function in which the argument was defined. | |
3860 | ||
3861 | There are two types of rounding that are done. The first, controlled by | |
3862 | FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument | |
3863 | list to be aligned to the specific boundary (in bits). This rounding | |
3864 | affects the initial and starting offsets, but not the argument size. | |
3865 | ||
3866 | The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY, | |
3867 | optionally rounds the size of the parm to PARM_BOUNDARY. The | |
3868 | initial offset is not affected by this rounding, while the size always | |
3869 | is and the starting offset may be. */ | |
3870 | ||
3871 | /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case; | |
3872 | initial_offset_ptr is positive because locate_and_pad_parm's | |
3873 | callers pass in the total size of args so far as | |
3874 | initial_offset_ptr. arg_size_ptr is always positive. */ | |
3875 | ||
3876 | static void pad_to_arg_alignment (), pad_below (); | |
3877 | ||
3878 | void | |
3879 | locate_and_pad_parm (passed_mode, type, in_regs, fndecl, | |
3880 | initial_offset_ptr, offset_ptr, arg_size_ptr) | |
3881 | enum machine_mode passed_mode; | |
3882 | tree type; | |
3883 | int in_regs; | |
3884 | tree fndecl; | |
3885 | struct args_size *initial_offset_ptr; | |
3886 | struct args_size *offset_ptr; | |
3887 | struct args_size *arg_size_ptr; | |
3888 | { | |
3889 | tree sizetree | |
3890 | = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); | |
3891 | enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type); | |
3892 | int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type); | |
3893 | int boundary_in_bytes = boundary / BITS_PER_UNIT; | |
3894 | int reg_parm_stack_space = 0; | |
3895 | ||
3896 | #ifdef REG_PARM_STACK_SPACE | |
3897 | /* If we have found a stack parm before we reach the end of the | |
3898 | area reserved for registers, skip that area. */ | |
3899 | if (! in_regs) | |
3900 | { | |
29008b51 JW |
3901 | #ifdef MAYBE_REG_PARM_STACK_SPACE |
3902 | reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; | |
3903 | #else | |
6f086dfc | 3904 | reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); |
29008b51 | 3905 | #endif |
6f086dfc RS |
3906 | if (reg_parm_stack_space > 0) |
3907 | { | |
3908 | if (initial_offset_ptr->var) | |
3909 | { | |
3910 | initial_offset_ptr->var | |
3911 | = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | |
3912 | size_int (reg_parm_stack_space)); | |
3913 | initial_offset_ptr->constant = 0; | |
3914 | } | |
3915 | else if (initial_offset_ptr->constant < reg_parm_stack_space) | |
3916 | initial_offset_ptr->constant = reg_parm_stack_space; | |
3917 | } | |
3918 | } | |
3919 | #endif /* REG_PARM_STACK_SPACE */ | |
3920 | ||
3921 | arg_size_ptr->var = 0; | |
3922 | arg_size_ptr->constant = 0; | |
3923 | ||
3924 | #ifdef ARGS_GROW_DOWNWARD | |
3925 | if (initial_offset_ptr->var) | |
3926 | { | |
3927 | offset_ptr->constant = 0; | |
3928 | offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node, | |
3929 | initial_offset_ptr->var); | |
3930 | } | |
3931 | else | |
3932 | { | |
3933 | offset_ptr->constant = - initial_offset_ptr->constant; | |
3934 | offset_ptr->var = 0; | |
3935 | } | |
3936 | if (where_pad == upward | |
3937 | && (TREE_CODE (sizetree) != INTEGER_CST | |
3938 | || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY))) | |
3939 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3940 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
66bcbe19 TG |
3941 | if (where_pad != downward) |
3942 | pad_to_arg_alignment (offset_ptr, boundary); | |
6f086dfc RS |
3943 | if (initial_offset_ptr->var) |
3944 | { | |
3945 | arg_size_ptr->var = size_binop (MINUS_EXPR, | |
3946 | size_binop (MINUS_EXPR, | |
3947 | integer_zero_node, | |
3948 | initial_offset_ptr->var), | |
3949 | offset_ptr->var); | |
3950 | } | |
3951 | else | |
3952 | { | |
3953 | arg_size_ptr->constant = (- initial_offset_ptr->constant - | |
3954 | offset_ptr->constant); | |
3955 | } | |
3956 | /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */ | |
3957 | if (where_pad == downward) | |
3958 | pad_below (arg_size_ptr, passed_mode, sizetree); | |
3959 | #else /* !ARGS_GROW_DOWNWARD */ | |
3960 | pad_to_arg_alignment (initial_offset_ptr, boundary); | |
3961 | *offset_ptr = *initial_offset_ptr; | |
6f086dfc RS |
3962 | |
3963 | #ifdef PUSH_ROUNDING | |
3964 | if (passed_mode != BLKmode) | |
3965 | sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); | |
3966 | #endif | |
3967 | ||
3968 | if (where_pad != none | |
3969 | && (TREE_CODE (sizetree) != INTEGER_CST | |
3970 | || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY))) | |
3971 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3972 | ||
f31870c1 JW |
3973 | /* This must be done after rounding sizetree, so that it will subtract |
3974 | the same value that we explicitly add below. */ | |
3975 | if (where_pad == downward) | |
3976 | pad_below (offset_ptr, passed_mode, sizetree); | |
6f086dfc RS |
3977 | ADD_PARM_SIZE (*arg_size_ptr, sizetree); |
3978 | #endif /* ARGS_GROW_DOWNWARD */ | |
3979 | } | |
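/* Worked example (editorial; the numbers are illustrative): suppose args
   grow upward, PARM_BOUNDARY is 32, and FUNCTION_ARG_BOUNDARY returns 64
   for a DImode parm.  With *INITIAL_OFFSET_PTR at 20 bytes, the first
   rounding (pad_to_arg_alignment) moves the offset up to 24, the next
   64-bit boundary; the 8-byte size is already a multiple of 32 bits, so
   *ARG_SIZE_PTR stays 8 and the following parm starts at offset 32.  */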
3980 | ||
e16c591a RS |
3981 | /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY. |
3982 | BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ | |
3983 | ||
6f086dfc RS |
3984 | static void |
3985 | pad_to_arg_alignment (offset_ptr, boundary) | |
3986 | struct args_size *offset_ptr; | |
3987 | int boundary; | |
3988 | { | |
3989 | int boundary_in_bytes = boundary / BITS_PER_UNIT; | |
3990 | ||
3991 | if (boundary > BITS_PER_UNIT) | |
3992 | { | |
3993 | if (offset_ptr->var) | |
3994 | { | |
3995 | offset_ptr->var = | |
3996 | #ifdef ARGS_GROW_DOWNWARD | |
3997 | round_down | |
3998 | #else | |
3999 | round_up | |
4000 | #endif | |
4001 | (ARGS_SIZE_TREE (*offset_ptr), | |
4002 | boundary / BITS_PER_UNIT); | |
4003 | offset_ptr->constant = 0; /*?*/ | |
4004 | } | |
4005 | else | |
4006 | offset_ptr->constant = | |
4007 | #ifdef ARGS_GROW_DOWNWARD | |
4008 | FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes); | |
4009 | #else | |
4010 | CEIL_ROUND (offset_ptr->constant, boundary_in_bytes); | |
4011 | #endif | |
4012 | } | |
4013 | } | |
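/* Example (editorial): with a 32-bit boundary, CEIL_ROUND (5, 4) is 8 and
   FLOOR_ROUND (-5, 4) is -8, so a constant offset of 5 becomes 8 when args
   grow upward, while -5 becomes -8 when they grow downward.  */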
4014 | ||
4015 | static void | |
4016 | pad_below (offset_ptr, passed_mode, sizetree) | |
4017 | struct args_size *offset_ptr; | |
4018 | enum machine_mode passed_mode; | |
4019 | tree sizetree; | |
4020 | { | |
4021 | if (passed_mode != BLKmode) | |
4022 | { | |
4023 | if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY) | |
4024 | offset_ptr->constant | |
4025 | += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1) | |
4026 | / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT) | |
4027 | - GET_MODE_SIZE (passed_mode)); | |
4028 | } | |
4029 | else | |
4030 | { | |
4031 | if (TREE_CODE (sizetree) != INTEGER_CST | |
4032 | || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY) | |
4033 | { | |
4034 | /* Round the size up to multiple of PARM_BOUNDARY bits. */ | |
4035 | tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
4036 | /* Add it in. */ | |
4037 | ADD_PARM_SIZE (*offset_ptr, s2); | |
4038 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
4039 | } | |
4040 | } | |
4041 | } | |
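/* Example (editorial): for an HImode parm (16 bits) with PARM_BOUNDARY of
   32, pad_below adds 32/8 - 2 == 2 bytes to the offset, so the value sits
   in the high-address half of its padded 4-byte slot.  */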
4042 | ||
4043 | static tree | |
4044 | round_down (value, divisor) | |
4045 | tree value; | |
4046 | int divisor; | |
4047 | { | |
4048 | return size_binop (MULT_EXPR, | |
4049 | size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)), | |
4050 | size_int (divisor)); | |
4051 | } | |
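/* Example (editorial): round_down (size_int (37), 8) yields 32, since the
   FLOOR_DIV_EXPR gives 4 and the multiplication restores byte units.  */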
4052 | \f | |
4053 | /* Walk the tree of blocks describing the binding levels within a function | |
4054 | and warn about uninitialized variables. | |
4055 | This is done after calling flow_analysis and before global_alloc | |
4056 | clobbers the pseudo-regs to hard regs. */ | |
4057 | ||
4058 | void | |
4059 | uninitialized_vars_warning (block) | |
4060 | tree block; | |
4061 | { | |
4062 | register tree decl, sub; | |
4063 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
4064 | { | |
4065 | if (TREE_CODE (decl) == VAR_DECL | |
4066 | /* These warnings are unreliable for aggregates | |
4067 | because assigning the fields one by one can fail to convince | |
4068 | flow.c that the entire aggregate was initialized. | |
4069 | Unions are troublesome because members may be shorter. */ | |
4070 | && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE | |
4071 | && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE | |
c1b98a95 | 4072 | && TREE_CODE (TREE_TYPE (decl)) != QUAL_UNION_TYPE |
6f086dfc RS |
4073 | && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE |
4074 | && DECL_RTL (decl) != 0 | |
4075 | && GET_CODE (DECL_RTL (decl)) == REG | |
4076 | && regno_uninitialized (REGNO (DECL_RTL (decl)))) | |
4077 | warning_with_decl (decl, | |
4078 | "`%s' may be used uninitialized in this function"); | |
4079 | if (TREE_CODE (decl) == VAR_DECL | |
4080 | && DECL_RTL (decl) != 0 | |
4081 | && GET_CODE (DECL_RTL (decl)) == REG | |
4082 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
4083 | warning_with_decl (decl, | |
f1b985b7 | 4084 | "variable `%s' may be clobbered by `longjmp' or `vfork'"); |
6f086dfc RS |
4085 | } |
4086 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
4087 | uninitialized_vars_warning (sub); | |
4088 | } | |
4089 | ||
4090 | /* Do the appropriate part of uninitialized_vars_warning | |
4091 | but for arguments instead of local variables. */ | |
4092 | ||
4093 | void | |
4094 | setjmp_args_warning (block) | |
4095 | tree block; | |
4096 | { | |
4097 | register tree decl; | |
4098 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
4099 | decl; decl = TREE_CHAIN (decl)) | |
4100 | if (DECL_RTL (decl) != 0 | |
4101 | && GET_CODE (DECL_RTL (decl)) == REG | |
4102 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
f1b985b7 | 4103 | warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp' or `vfork'"); |
6f086dfc RS |
4104 | } |
4105 | ||
4106 | /* If this function calls setjmp, put all vars into the stack | |
4107 | unless they were declared `register'. */ | |
4108 | ||
4109 | void | |
4110 | setjmp_protect (block) | |
4111 | tree block; | |
4112 | { | |
4113 | register tree decl, sub; | |
4114 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
4115 | if ((TREE_CODE (decl) == VAR_DECL | |
4116 | || TREE_CODE (decl) == PARM_DECL) | |
4117 | && DECL_RTL (decl) != 0 | |
4118 | && GET_CODE (DECL_RTL (decl)) == REG | |
b335c2cc TW |
4119 | /* If this variable came from an inline function, it must be |
4120 | that its life doesn't overlap the setjmp. If there was a | |
4121 | setjmp in the function, it would already be in memory. We | |
4122 | must exclude such variables because their DECL_RTL might be | |
4123 | set to strange things such as virtual_stack_vars_rtx. */ | |
4124 | && ! DECL_FROM_INLINE (decl) | |
6f086dfc RS |
4125 | && ( |
4126 | #ifdef NON_SAVING_SETJMP | |
4127 | /* If longjmp doesn't restore the registers, | |
4128 | don't put anything in them. */ | |
4129 | NON_SAVING_SETJMP | |
4130 | || | |
4131 | #endif | |
a82ad570 | 4132 | ! DECL_REGISTER (decl))) |
6f086dfc RS |
4133 | put_var_into_stack (decl); |
4134 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
4135 | setjmp_protect (sub); | |
4136 | } | |
4137 | \f | |
4138 | /* Like the previous function, but for args instead of local variables. */ | |
4139 | ||
4140 | void | |
4141 | setjmp_protect_args () | |
4142 | { | |
4143 | register tree decl, sub; | |
4144 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
4145 | decl; decl = TREE_CHAIN (decl)) | |
4146 | if ((TREE_CODE (decl) == VAR_DECL | |
4147 | || TREE_CODE (decl) == PARM_DECL) | |
4148 | && DECL_RTL (decl) != 0 | |
4149 | && GET_CODE (DECL_RTL (decl)) == REG | |
4150 | && ( | |
4151 | /* If longjmp doesn't restore the registers, | |
4152 | don't put anything in them. */ | |
4153 | #ifdef NON_SAVING_SETJMP | |
4154 | NON_SAVING_SETJMP | |
4155 | || | |
4156 | #endif | |
a82ad570 | 4157 | ! DECL_REGISTER (decl))) |
6f086dfc RS |
4158 | put_var_into_stack (decl); |
4159 | } | |
4160 | \f | |
4161 | /* Return the context-pointer register corresponding to DECL, | |
4162 | or 0 if it does not need one. */ | |
4163 | ||
4164 | rtx | |
4165 | lookup_static_chain (decl) | |
4166 | tree decl; | |
4167 | { | |
4168 | tree context = decl_function_context (decl); | |
4169 | tree link; | |
4170 | ||
4171 | if (context == 0) | |
4172 | return 0; | |
4173 | ||
4174 | /* We treat inline_function_decl as an alias for the current function | |
4175 | because that is the inline function whose vars, types, etc. | |
4176 | are being merged into the current function. | |
4177 | See expand_inline_function. */ | |
4178 | if (context == current_function_decl || context == inline_function_decl) | |
4179 | return virtual_stack_vars_rtx; | |
4180 | ||
4181 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
4182 | if (TREE_PURPOSE (link) == context) | |
4183 | return RTL_EXPR_RTL (TREE_VALUE (link)); | |
4184 | ||
4185 | abort (); | |
4186 | } | |
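/* Editorial sketch (illustrative; var_offset is an invented name): code
   expanding a reference to a containing function's variable can rebase
   the access on the chain pointer returned here.  */
#if 0
  rtx chain = lookup_static_chain (decl);

  if (chain != 0)
    addr = memory_address (Pmode, plus_constant (chain, var_offset));
#endif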
4187 | \f | |
4188 | /* Convert a stack slot address ADDR for variable VAR | |
4189 | (from a containing function) | |
4190 | into an address valid in this function (using a static chain). */ | |
4191 | ||
4192 | rtx | |
4193 | fix_lexical_addr (addr, var) | |
4194 | rtx addr; | |
4195 | tree var; | |
4196 | { | |
4197 | rtx basereg; | |
4198 | int displacement; | |
4199 | tree context = decl_function_context (var); | |
4200 | struct function *fp; | |
4201 | rtx base = 0; | |
4202 | ||
4203 | /* If this is the present function, we need not do anything. */ | |
4204 | if (context == current_function_decl || context == inline_function_decl) | |
4205 | return addr; | |
4206 | ||
4207 | for (fp = outer_function_chain; fp; fp = fp->next) | |
4208 | if (fp->decl == context) | |
4209 | break; | |
4210 | ||
4211 | if (fp == 0) | |
4212 | abort (); | |
4213 | ||
4214 | /* Decode given address as base reg plus displacement. */ | |
4215 | if (GET_CODE (addr) == REG) | |
4216 | basereg = addr, displacement = 0; | |
4217 | else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
4218 | basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1)); | |
4219 | else | |
4220 | abort (); | |
4221 | ||
4222 | /* We accept vars reached via the containing function's | |
4223 | incoming arg pointer and via its stack variables pointer. */ | |
4224 | if (basereg == fp->internal_arg_pointer) | |
4225 | { | |
4226 | /* If reached via arg pointer, get the arg pointer value | |
4227 | out of that function's stack frame. | |
4228 | ||
4229 | There are two cases: If a separate ap is needed, allocate a | |
4230 | slot in the outer function for it and dereference it that way. | |
4231 | This is correct even if the real ap is actually a pseudo. | |
4232 | Otherwise, just adjust the offset from the frame pointer to | |
4233 | compensate. */ | |
4234 | ||
4235 | #ifdef NEED_SEPARATE_AP | |
4236 | rtx addr; | |
4237 | ||
4238 | if (fp->arg_pointer_save_area == 0) | |
4239 | fp->arg_pointer_save_area | |
4240 | = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp); | |
4241 | ||
4242 | addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var); | |
4243 | addr = memory_address (Pmode, addr); | |
4244 | ||
4245 | base = copy_to_reg (gen_rtx (MEM, Pmode, addr)); | |
4246 | #else | |
4247 | displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET); | |
86f8eff3 | 4248 | base = lookup_static_chain (var); |
6f086dfc RS |
4249 | #endif |
4250 | } | |
4251 | ||
4252 | else if (basereg == virtual_stack_vars_rtx) | |
4253 | { | |
4254 | /* This is the same code as lookup_static_chain, duplicated here to | |
4255 | avoid an extra call to decl_function_context. */ | |
4256 | tree link; | |
4257 | ||
4258 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
4259 | if (TREE_PURPOSE (link) == context) | |
4260 | { | |
4261 | base = RTL_EXPR_RTL (TREE_VALUE (link)); | |
4262 | break; | |
4263 | } | |
4264 | } | |
4265 | ||
4266 | if (base == 0) | |
4267 | abort (); | |
4268 | ||
4269 | /* Use same offset, relative to appropriate static chain or argument | |
4270 | pointer. */ | |
4271 | return plus_constant (base, displacement); | |
4272 | } | |
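/* For example (an illustrative sketch): if VAR lives at
   (plus virtual_stack_vars_rtx -12) in the containing function, the
   address returned here is (plus CHAIN -12), where CHAIN is the
   display entry that points at that function's frame; the
   displacement is carried over unchanged. */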
4273 | \f | |
4274 | /* Return the address of the trampoline for entering nested fn FUNCTION. | |
4275 | If necessary, allocate a trampoline (in the stack frame) | |
4276 | and emit rtl to initialize its contents (at entry to this function). */ | |
4277 | ||
4278 | rtx | |
4279 | trampoline_address (function) | |
4280 | tree function; | |
4281 | { | |
4282 | tree link; | |
4283 | tree rtlexp; | |
4284 | rtx tramp; | |
4285 | struct function *fp; | |
4286 | tree fn_context; | |
4287 | ||
4288 | /* Find an existing trampoline and return it. */ | |
4289 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
4290 | if (TREE_PURPOSE (link) == function) | |
4291 | return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0); | |
4292 | for (fp = outer_function_chain; fp; fp = fp->next) | |
4293 | for (link = fp->trampoline_list; link; link = TREE_CHAIN (link)) | |
4294 | if (TREE_PURPOSE (link) == function) | |
4295 | { | |
4296 | tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0), | |
4297 | function); | |
4298 | return round_trampoline_addr (tramp); | |
4299 | } | |
4300 | ||
4301 | /* None exists; we must make one. */ | |
4302 | ||
4303 | /* Find the `struct function' for the function containing FUNCTION. */ | |
4304 | fp = 0; | |
4305 | fn_context = decl_function_context (function); | |
4306 | if (fn_context != current_function_decl) | |
4307 | for (fp = outer_function_chain; fp; fp = fp->next) | |
4308 | if (fp->decl == fn_context) | |
4309 | break; | |
4310 | ||
4311 | /* Allocate run-time space for this trampoline | |
4312 | (usually in the defining function's stack frame). */ | |
4313 | #ifdef ALLOCATE_TRAMPOLINE | |
4314 | tramp = ALLOCATE_TRAMPOLINE (fp); | |
4315 | #else | |
4316 | /* If rounding is needed, allocate extra space | |
4317 | to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */ | |
4318 | #ifdef TRAMPOLINE_ALIGNMENT | |
4319 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1) | |
4320 | #else | |
4321 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE) | |
4322 | #endif | |
4323 | if (fp != 0) | |
4324 | tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp); | |
4325 | else | |
4326 | tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0); | |
4327 | #endif | |
4328 | ||
4329 | /* Record the trampoline for reuse and note it for later initialization | |
4330 | by expand_function_end. */ | |
4331 | if (fp != 0) | |
4332 | { | |
28498644 RK |
4333 | push_obstacks (fp->function_maybepermanent_obstack, |
4334 | fp->function_maybepermanent_obstack); | |
6f086dfc RS |
4335 | rtlexp = make_node (RTL_EXPR); |
4336 | RTL_EXPR_RTL (rtlexp) = tramp; | |
4337 | fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list); | |
4338 | pop_obstacks (); | |
4339 | } | |
4340 | else | |
4341 | { | |
4342 | /* Make the RTL_EXPR node temporary, not momentary, so that the | |
4343 | trampoline_list doesn't become garbage. */ | |
4344 | int momentary = suspend_momentary (); | |
4345 | rtlexp = make_node (RTL_EXPR); | |
4346 | resume_momentary (momentary); | |
4347 | ||
4348 | RTL_EXPR_RTL (rtlexp) = tramp; | |
4349 | trampoline_list = tree_cons (function, rtlexp, trampoline_list); | |
4350 | } | |
4351 | ||
4352 | tramp = fix_lexical_addr (XEXP (tramp, 0), function); | |
4353 | return round_trampoline_addr (tramp); | |
4354 | } | |
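/* Illustrative context (a hypothetical caller): when the address of
   a nested function escapes, e.g.

       f () { int k; g (x) int x; { return x + k; } use (g); }

   what `use' receives is the trampoline's address; calling through it
   loads f's static chain and then jumps to g's real code. */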
4355 | ||
4356 | /* Given a trampoline address, | |
4357 | round it to a multiple of TRAMPOLINE_ALIGNMENT. */ | |
4358 | ||
4359 | static rtx | |
4360 | round_trampoline_addr (tramp) | |
4361 | rtx tramp; | |
4362 | { | |
4363 | #ifdef TRAMPOLINE_ALIGNMENT | |
4364 | /* Round address up to desired boundary. */ | |
4365 | rtx temp = gen_reg_rtx (Pmode); | |
4366 | temp = expand_binop (Pmode, add_optab, tramp, | |
5f4f0e22 | 4367 | GEN_INT (TRAMPOLINE_ALIGNMENT - 1), |
6f086dfc RS |
4368 | temp, 0, OPTAB_LIB_WIDEN); |
4369 | tramp = expand_binop (Pmode, and_optab, temp, | |
5f4f0e22 | 4370 | GEN_INT (- TRAMPOLINE_ALIGNMENT), |
6f086dfc RS |
4371 | temp, 0, OPTAB_LIB_WIDEN); |
4372 | #endif | |
4373 | return tramp; | |
4374 | } | |
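/* A worked example, assuming TRAMPOLINE_ALIGNMENT is 16: for a raw
   address 0x1003 the sequence computes 0x1003 + 15 = 0x1012, then
   0x1012 & -16 = 0x1010, the next 16-byte boundary at or above the
   original address. */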
4375 | \f | |
467456d0 RS |
4376 | /* The functions identify_blocks and reorder_blocks provide a way to |
4377 | reorder the tree of BLOCK nodes, for optimizers that reshuffle or | |
4378 | duplicate portions of the RTL code. Call identify_blocks before | |
4379 | changing the RTL, and call reorder_blocks after. */ | |
4380 | ||
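/* A minimal usage sketch (the caller shown is hypothetical):

       tree *blocks = identify_blocks (top_block, get_insns ());
       ... reorder or duplicate insns, keeping the block notes ...
       top_block = reorder_blocks (blocks, top_block, get_insns ());
       free (blocks);
*/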
4381 | static int all_blocks (); | |
4382 | static tree blocks_nreverse (); | |
4383 | ||
4384 | /* Put all this function's BLOCK nodes into a vector, and return it. | |
4385 | Also store in each NOTE for the beginning or end of a block | |
4386 | the index of that block in the vector. | |
4387 | The arguments are TOP_BLOCK, the top-level block of the function, | |
4388 | and INSNS, the insn chain of the function. */ | |
4389 | ||
4390 | tree * | |
4391 | identify_blocks (top_block, insns) | |
4392 | tree top_block; | |
4393 | rtx insns; | |
4394 | { | |
fc289cd1 JW |
4395 | int n_blocks; |
4396 | tree *block_vector; | |
4397 | int *block_stack; | |
467456d0 RS |
4398 | int depth = 0; |
4399 | int next_block_number = 0; | |
4400 | int current_block_number = 0; | |
4401 | rtx insn; | |
4402 | ||
fc289cd1 JW |
4403 | if (top_block == 0) |
4404 | return 0; | |
4405 | ||
4406 | n_blocks = all_blocks (top_block, 0); | |
4407 | block_vector = (tree *) xmalloc (n_blocks * sizeof (tree)); | |
4408 | block_stack = (int *) alloca (n_blocks * sizeof (int)); | |
4409 | ||
467456d0 RS |
4410 | all_blocks (top_block, block_vector); |
4411 | ||
4412 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
4413 | if (GET_CODE (insn) == NOTE) | |
4414 | { | |
4415 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
4416 | { | |
4417 | block_stack[depth++] = current_block_number; | |
4418 | current_block_number = next_block_number; | |
1b2ac438 | 4419 | NOTE_BLOCK_NUMBER (insn) = next_block_number++; |
467456d0 RS |
4420 | } |
4421 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
4422 | { | |
4423 | current_block_number = block_stack[--depth]; | |
1b2ac438 | 4424 | NOTE_BLOCK_NUMBER (insn) = current_block_number; |
467456d0 RS |
4425 | } |
4426 | } | |
4427 | ||
4428 | return block_vector; | |
4429 | } | |
4430 | ||
4431 | /* Given BLOCK_VECTOR which was returned by identify_blocks, | |
4432 | and a revised instruction chain, rebuild the tree structure | |
4433 | of BLOCK nodes to correspond to the new order of RTL. | |
fc289cd1 | 4434 | The new block tree is inserted below TOP_BLOCK. |
467456d0 RS |
4435 | Returns the current top-level block. */ |
4436 | ||
4437 | tree | |
fc289cd1 | 4438 | reorder_blocks (block_vector, top_block, insns) |
467456d0 | 4439 | tree *block_vector; |
fc289cd1 | 4440 | tree top_block; |
467456d0 RS |
4441 | rtx insns; |
4442 | { | |
fc289cd1 | 4443 | tree current_block = top_block; |
467456d0 RS |
4444 | rtx insn; |
4445 | ||
fc289cd1 JW |
4446 | if (block_vector == 0) |
4447 | return top_block; | |
4448 | ||
4449 | /* Prune the old tree away, so that it doesn't get in the way. */ | |
4450 | BLOCK_SUBBLOCKS (current_block) = 0; | |
4451 | ||
467456d0 RS |
4452 | for (insn = insns; insn; insn = NEXT_INSN (insn)) |
4453 | if (GET_CODE (insn) == NOTE) | |
4454 | { | |
4455 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
4456 | { | |
4457 | tree block = block_vector[NOTE_BLOCK_NUMBER (insn)]; | |
4458 | /* If we have seen this block before, copy it. */ | |
4459 | if (TREE_ASM_WRITTEN (block)) | |
4460 | block = copy_node (block); | |
fc289cd1 | 4461 | BLOCK_SUBBLOCKS (block) = 0; |
467456d0 RS |
4462 | TREE_ASM_WRITTEN (block) = 1; |
4463 | BLOCK_SUPERCONTEXT (block) = current_block; | |
4464 | BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); | |
4465 | BLOCK_SUBBLOCKS (current_block) = block; | |
4466 | current_block = block; | |
1b2ac438 | 4467 | NOTE_SOURCE_FILE (insn) = 0; |
467456d0 RS |
4468 | } |
4469 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
4470 | { | |
4471 | BLOCK_SUBBLOCKS (current_block) | |
4472 | = blocks_nreverse (BLOCK_SUBBLOCKS (current_block)); | |
4473 | current_block = BLOCK_SUPERCONTEXT (current_block); | |
1b2ac438 | 4474 | NOTE_SOURCE_FILE (insn) = 0; |
467456d0 RS |
4475 | } |
4476 | } | |
4477 | ||
4478 | return current_block; | |
4479 | } | |
4480 | ||
4481 | /* Reverse the order of elements in the chain T of blocks, | |
4482 | and return the new head of the chain (old last element). */ | |
4483 | ||
4484 | static tree | |
4485 | blocks_nreverse (t) | |
4486 | tree t; | |
4487 | { | |
4488 | register tree prev = 0, decl, next; | |
4489 | for (decl = t; decl; decl = next) | |
4490 | { | |
4491 | next = BLOCK_CHAIN (decl); | |
4492 | BLOCK_CHAIN (decl) = prev; | |
4493 | prev = decl; | |
4494 | } | |
4495 | return prev; | |
4496 | } | |
4497 | ||
4498 | /* Count BLOCK and all of its subblocks, and list them into the vector VECTOR. | |
4499 | Also clear TREE_ASM_WRITTEN in all blocks. */ | |
4500 | ||
4501 | static int | |
4502 | all_blocks (block, vector) | |
4503 | tree block; | |
4504 | tree *vector; | |
4505 | { | |
4506 | int n_blocks = 1; | |
4507 | tree subblocks; | |
4508 | ||
4509 | TREE_ASM_WRITTEN (block) = 0; | |
4510 | /* Record this block. */ | |
fc289cd1 JW |
4511 | if (vector) |
4512 | vector[0] = block; | |
467456d0 RS |
4513 | |
4514 | /* Record the subblocks, and their subblocks. */ | |
4515 | for (subblocks = BLOCK_SUBBLOCKS (block); | |
4516 | subblocks; subblocks = BLOCK_CHAIN (subblocks)) | |
fc289cd1 | 4517 | n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0); |
467456d0 RS |
4518 | |
4519 | return n_blocks; | |
4520 | } | |
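/* For instance (illustrative): for a block tree A containing B and D,
   with B containing C, the vector is filled in depth-first preorder,
   { A, B, C, D }, and all_blocks returns 4. */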
4521 | \f | |
c20bf1f3 JB |
4522 | /* Build bytecode call descriptor for function SUBR. */ |
4523 | rtx | |
4524 | bc_build_calldesc (subr) | |
4525 | tree subr; | |
4526 | { | |
4527 | tree calldesc = 0, arg; | |
4528 | int nargs = 0; | |
4529 | ||
4530 | /* Build the argument description vector in reverse order. */ | |
4531 | DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr)); | |
4532 | nargs = 0; | |
4533 | ||
4534 | for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg)) | |
4535 | { | |
4536 | ++nargs; | |
4537 | ||
4538 | calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc); | |
4539 | calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc); | |
4540 | } | |
4541 | ||
4542 | DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr)); | |
4543 | ||
4544 | /* Prepend the function's return type. */ | |
4545 | calldesc = tree_cons ((tree) 0, | |
4546 | size_in_bytes (TREE_TYPE (TREE_TYPE (subr))), | |
4547 | calldesc); | |
4548 | ||
4549 | calldesc = tree_cons ((tree) 0, | |
4550 | bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))), | |
4551 | calldesc); | |
4552 | ||
4553 | /* Prepend the arg count. */ | |
4554 | calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc); | |
4555 | ||
4556 | /* Output the call description vector and get its address. */ | |
4557 | calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc); | |
4558 | TREE_TYPE (calldesc) = build_array_type (integer_type_node, | |
4559 | build_index_type (build_int_2 (nargs * 2, 0))); | |
4560 | ||
4561 | return output_constant_def (calldesc); | |
4562 | } | |
4563 | ||
4564 | ||
6f086dfc RS |
4565 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) |
4566 | and initialize static variables for generating RTL for the statements | |
4567 | of the function. */ | |
4568 | ||
4569 | void | |
4570 | init_function_start (subr, filename, line) | |
4571 | tree subr; | |
4572 | char *filename; | |
4573 | int line; | |
4574 | { | |
4575 | char *junk; | |
4576 | ||
c20bf1f3 JB |
4577 | if (output_bytecode) |
4578 | { | |
4579 | this_function_decl = subr; | |
4580 | this_function_calldesc = bc_build_calldesc (subr); | |
4581 | local_vars_size = 0; | |
4582 | stack_depth = 0; | |
4583 | max_stack_depth = 0; | |
4584 | stmt_expr_depth = 0; | |
4585 | return; | |
4586 | } | |
4587 | ||
6f086dfc RS |
4588 | init_stmt_for_function (); |
4589 | ||
4590 | cse_not_expected = ! optimize; | |
4591 | ||
4592 | /* Caller save not needed yet. */ | |
4593 | caller_save_needed = 0; | |
4594 | ||
4595 | /* No stack slots have been made yet. */ | |
4596 | stack_slot_list = 0; | |
4597 | ||
4598 | /* There is no stack slot for handling nonlocal gotos. */ | |
4599 | nonlocal_goto_handler_slot = 0; | |
4600 | nonlocal_goto_stack_level = 0; | |
4601 | ||
4602 | /* No labels have been declared for nonlocal use. */ | |
4603 | nonlocal_labels = 0; | |
4604 | ||
4605 | /* No function calls so far in this function. */ | |
4606 | function_call_count = 0; | |
4607 | ||
4608 | /* No parm regs have been allocated. | |
4609 | (This is important for output_inline_function.) */ | |
4610 | max_parm_reg = LAST_VIRTUAL_REGISTER + 1; | |
4611 | ||
4612 | /* Initialize the RTL mechanism. */ | |
4613 | init_emit (); | |
4614 | ||
4615 | /* Initialize the queue of pending postincrement and postdecrements, | |
4616 | and some other info in expr.c. */ | |
4617 | init_expr (); | |
4618 | ||
4619 | /* We haven't done register allocation yet. */ | |
4620 | reg_renumber = 0; | |
4621 | ||
4622 | init_const_rtx_hash_table (); | |
4623 | ||
4624 | current_function_name = (*decl_printable_name) (subr, &junk); | |
4625 | ||
4626 | /* Nonzero if this is a nested function that uses a static chain. */ | |
4627 | ||
4628 | current_function_needs_context | |
4629 | = (decl_function_context (current_function_decl) != 0); | |
4630 | ||
4631 | /* Set if a call to setjmp is seen. */ | |
4632 | current_function_calls_setjmp = 0; | |
4633 | ||
4634 | /* Set if a call to longjmp is seen. */ | |
4635 | current_function_calls_longjmp = 0; | |
4636 | ||
4637 | current_function_calls_alloca = 0; | |
4638 | current_function_has_nonlocal_label = 0; | |
8634413a | 4639 | current_function_has_nonlocal_goto = 0; |
6f086dfc RS |
4640 | current_function_contains_functions = 0; |
4641 | ||
4642 | current_function_returns_pcc_struct = 0; | |
4643 | current_function_returns_struct = 0; | |
4644 | current_function_epilogue_delay_list = 0; | |
4645 | current_function_uses_const_pool = 0; | |
4646 | current_function_uses_pic_offset_table = 0; | |
4647 | ||
4648 | /* We have not yet needed to make a label to jump to for tail-recursion. */ | |
4649 | tail_recursion_label = 0; | |
4650 | ||
4651 | /* We haven't had a need to make a save area for ap yet. */ | |
4652 | ||
4653 | arg_pointer_save_area = 0; | |
4654 | ||
4655 | /* No stack slots allocated yet. */ | |
4656 | frame_offset = 0; | |
4657 | ||
4658 | /* No SAVE_EXPRs in this function yet. */ | |
4659 | save_expr_regs = 0; | |
4660 | ||
4661 | /* No RTL_EXPRs in this function yet. */ | |
4662 | rtl_expr_chain = 0; | |
4663 | ||
4664 | /* We have not allocated any temporaries yet. */ | |
4665 | temp_slots = 0; | |
4666 | temp_slot_level = 0; | |
4667 | ||
4668 | /* Within function body, compute a type's size as soon as it is laid out. */ | |
4669 | immediate_size_expand++; | |
4670 | ||
d9a98e1a RK |
4671 | /* We haven't made any trampolines for this function yet. */ |
4672 | trampoline_list = 0; | |
4673 | ||
6f086dfc RS |
4674 | init_pending_stack_adjust (); |
4675 | inhibit_defer_pop = 0; | |
4676 | ||
4677 | current_function_outgoing_args_size = 0; | |
4678 | ||
4679 | /* Initialize the insn lengths. */ | |
4680 | init_insn_lengths (); | |
4681 | ||
4682 | /* Prevent ever trying to delete the first instruction of a function. | |
4683 | Also tell final how to output a linenum before the function prologue. */ | |
4684 | emit_line_note (filename, line); | |
4685 | ||
4686 | /* Make sure first insn is a note even if we don't want linenums. | |
4687 | This makes sure the first insn will never be deleted. | |
4688 | Also, final expects a note to appear there. */ | |
5f4f0e22 | 4689 | emit_note (NULL_PTR, NOTE_INSN_DELETED); |
6f086dfc RS |
4690 | |
4691 | /* Set flags used by final.c. */ | |
4692 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4693 | { | |
4694 | #ifdef PCC_STATIC_STRUCT_RETURN | |
1b8297c1 | 4695 | current_function_returns_pcc_struct = 1; |
6f086dfc | 4696 | #endif |
1b8297c1 | 4697 | current_function_returns_struct = 1; |
6f086dfc RS |
4698 | } |
4699 | ||
4700 | /* Warn if this value is an aggregate type, | |
4701 | regardless of which calling convention we are using for it. */ | |
4702 | if (warn_aggregate_return | |
4703 | && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE | |
4704 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE | |
c1b98a95 | 4705 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE |
6f086dfc RS |
4706 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE)) |
4707 | warning ("function returns an aggregate"); | |
4708 | ||
4709 | current_function_returns_pointer | |
4710 | = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE); | |
4711 | ||
4712 | /* Indicate that we need to distinguish between the return value of the | |
4713 | present function and the return value of a function being called. */ | |
4714 | rtx_equal_function_value_matters = 1; | |
4715 | ||
4716 | /* Indicate that we have not instantiated virtual registers yet. */ | |
4717 | virtuals_instantiated = 0; | |
4718 | ||
4719 | /* Indicate we have no need of a frame pointer yet. */ | |
4720 | frame_pointer_needed = 0; | |
4721 | ||
4722 | /* By default assume not varargs. */ | |
4723 | current_function_varargs = 0; | |
4724 | } | |
4725 | ||
4726 | /* Indicate that the current function uses extra args | |
4727 | not explicitly mentioned in the argument list in any fashion. */ | |
4728 | ||
4729 | void | |
4730 | mark_varargs () | |
4731 | { | |
4732 | current_function_varargs = 1; | |
4733 | } | |
4734 | ||
4735 | /* Expand a call to __main at the beginning of a possible main function. */ | |
4736 | ||
4737 | void | |
4738 | expand_main_function () | |
4739 | { | |
c20bf1f3 JB |
4740 | if (!output_bytecode) |
4741 | { | |
4742 | /* The zero below avoids a possible parse error */ | |
4743 | 0; | |
b335c2cc | 4744 | #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main) |
ae0d8288 | 4745 | emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0, |
c20bf1f3 | 4746 | VOIDmode, 0); |
b335c2cc | 4747 | #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */ |
c20bf1f3 | 4748 | } |
6f086dfc RS |
4749 | } |
4750 | \f | |
c20bf1f3 JB |
4751 | extern struct obstack permanent_obstack; |
4752 | ||
4753 | /* Expand start of bytecode function. See comment at | |
4754 | expand_function_start below for details. */ | |
4755 | ||
4756 | void | |
4757 | bc_expand_function_start (subr, parms_have_cleanups) | |
4758 | tree subr; | |
4759 | int parms_have_cleanups; | |
4760 | { | |
4761 | char label[20], *name; | |
4762 | static int nlab; | |
4763 | tree thisarg; | |
4764 | int argsz; | |
4765 | ||
4766 | if (TREE_PUBLIC (subr)) | |
4767 | bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr))); | |
4768 | ||
4769 | #ifdef DEBUG_PRINT_CODE | |
4770 | fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr))); | |
4771 | #endif | |
4772 | ||
4773 | for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg)) | |
4774 | { | |
4775 | if (DECL_RTL (thisarg)) | |
4776 | abort (); /* Should be NULL here I think. */ | |
4777 | else if (TREE_CONSTANT (DECL_SIZE (thisarg))) | |
4778 | { | |
4779 | DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0); | |
4780 | argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg)); | |
4781 | } | |
4782 | else | |
4783 | { | |
4784 | /* Variable-sized objects are pointers to their storage. */ | |
4785 | DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0); | |
4786 | argsz += POINTER_SIZE; | |
4787 | } | |
4788 | } | |
4789 | ||
4790 | bc_begin_function (bc_xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr)))); | |
4791 | ||
4792 | ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab); | |
4793 | ||
4794 | ++nlab; | |
4795 | name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label)); | |
4796 | this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0); | |
e7a42772 JB |
4797 | this_function_bytecode = |
4798 | bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo)); | |
c20bf1f3 JB |
4799 | } |
4800 | ||
4801 | ||
4802 | /* Expand end of bytecode function. For details, see the comment | |
4803 | at expand_function_end (), below. */ | |
4804 | ||
4805 | void | |
4806 | bc_expand_function_end () | |
4807 | { | |
4808 | char *ptrconsts; | |
4809 | ||
4810 | expand_null_return (); | |
4811 | ||
4812 | /* Emit any fixup code. This must be done before the call to | |
4813 | BC_END_FUNCTION (), since that will cause the bytecode | |
4814 | segment to be finished off and closed. */ | |
4815 | ||
4816 | fixup_gotos (0, 0, 0, 0, 0); | |
4817 | ||
4818 | ptrconsts = bc_end_function (); | |
4819 | ||
4820 | bc_align_const (2 /* INT_ALIGN */); | |
4821 | ||
4822 | /* If this changes, also make sure to change bc-interp.h! */ | |
4823 | ||
e7a42772 | 4824 | bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo)); |
c20bf1f3 JB |
4825 | bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth); |
4826 | bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size); | |
4827 | bc_emit_const_labelref (this_function_bytecode, 0); | |
4828 | bc_emit_const_labelref (ptrconsts, 0); | |
e7a42772 | 4829 | bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0); |
c20bf1f3 JB |
4830 | } |
4831 | ||
4832 | ||
6f086dfc RS |
4833 | /* Start the RTL for a new function, and set variables used for |
4834 | emitting RTL. | |
4835 | SUBR is the FUNCTION_DECL node. | |
4836 | PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with | |
4837 | the function's parameters, which must be run at any return statement. */ | |
4838 | ||
4839 | void | |
4840 | expand_function_start (subr, parms_have_cleanups) | |
4841 | tree subr; | |
4842 | int parms_have_cleanups; | |
4843 | { | |
4844 | register int i; | |
4845 | tree tem; | |
4846 | rtx last_ptr; | |
4847 | ||
c20bf1f3 JB |
4848 | if (output_bytecode) |
4849 | { | |
4850 | bc_expand_function_start (subr, parms_have_cleanups); | |
4851 | return; | |
4852 | } | |
4853 | ||
6f086dfc RS |
4854 | /* Make sure volatile mem refs aren't considered |
4855 | valid operands of arithmetic insns. */ | |
4856 | init_recog_no_volatile (); | |
4857 | ||
4858 | /* If function gets a static chain arg, store it in the stack frame. | |
4859 | Do this first, so it gets the first stack slot offset. */ | |
4860 | if (current_function_needs_context) | |
3e2481e9 JW |
4861 | { |
4862 | last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
f0c51a1e RK |
4863 | |
4864 | #ifdef SMALL_REGISTER_CLASSES | |
4865 | /* Delay copying static chain if it is not a register to avoid | |
4866 | conflicts with regs used for parameters. */ | |
4867 | if (GET_CODE (static_chain_incoming_rtx) == REG) | |
4868 | #endif | |
4869 | emit_move_insn (last_ptr, static_chain_incoming_rtx); | |
3e2481e9 | 4870 | } |
6f086dfc RS |
4871 | |
4872 | /* If the parameters of this function need cleaning up, get a label | |
4873 | for the beginning of the code which executes those cleanups. This must | |
4874 | be done before doing anything with return_label. */ | |
4875 | if (parms_have_cleanups) | |
4876 | cleanup_label = gen_label_rtx (); | |
4877 | else | |
4878 | cleanup_label = 0; | |
4879 | ||
4880 | /* Make the label for return statements to jump to, if this machine | |
4881 | does not have a one-instruction return and uses an epilogue, | |
4882 | or if it returns a structure, or if it has parm cleanups. */ | |
4883 | #ifdef HAVE_return | |
4884 | if (cleanup_label == 0 && HAVE_return | |
4885 | && ! current_function_returns_pcc_struct | |
4886 | && ! (current_function_returns_struct && ! optimize)) | |
4887 | return_label = 0; | |
4888 | else | |
4889 | return_label = gen_label_rtx (); | |
4890 | #else | |
4891 | return_label = gen_label_rtx (); | |
4892 | #endif | |
4893 | ||
4894 | /* Initialize rtx used to return the value. */ | |
4895 | /* Do this before assign_parms so that we copy the struct value address | |
4896 | before any library calls that assign parms might generate. */ | |
4897 | ||
4898 | /* Decide whether to return the value in memory or in a register. */ | |
4899 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4900 | { | |
4901 | /* Returning something that won't go in a register. */ | |
4902 | register rtx value_address; | |
4903 | ||
4904 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4905 | if (current_function_returns_pcc_struct) | |
4906 | { | |
4907 | int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr))); | |
4908 | value_address = assemble_static_space (size); | |
4909 | } | |
4910 | else | |
4911 | #endif | |
4912 | { | |
4913 | /* Expect to be passed the address of a place to store the value. | |
4914 | If it is passed as an argument, assign_parms will take care of | |
4915 | it. */ | |
4916 | if (struct_value_incoming_rtx) | |
4917 | { | |
4918 | value_address = gen_reg_rtx (Pmode); | |
4919 | emit_move_insn (value_address, struct_value_incoming_rtx); | |
4920 | } | |
4921 | } | |
4922 | if (value_address) | |
ccdecf58 RK |
4923 | { |
4924 | DECL_RTL (DECL_RESULT (subr)) | |
4925 | = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address); | |
4926 | MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr))) | |
4927 | = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE | |
4928 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE | |
4929 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE | |
4930 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE); | |
4931 | } | |
6f086dfc RS |
4932 | } |
4933 | else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode) | |
4934 | /* If return mode is void, this decl rtl should not be used. */ | |
4935 | DECL_RTL (DECL_RESULT (subr)) = 0; | |
4936 | else if (parms_have_cleanups) | |
a53e14c0 RK |
4937 | { |
4938 | /* If function will end with cleanup code for parms, | |
4939 | compute the return value into a pseudo reg, | |
4940 | which we will copy into the true return register | |
4941 | after the cleanups are done. */ | |
4942 | ||
4943 | enum machine_mode mode = DECL_MODE (DECL_RESULT (subr)); | |
4944 | #ifdef PROMOTE_FUNCTION_RETURN | |
4945 | tree type = TREE_TYPE (DECL_RESULT (subr)); | |
4946 | int unsignedp = TREE_UNSIGNED (type); | |
4947 | ||
4948 | if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE | |
4949 | || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE | |
4950 | || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE | |
4951 | || TREE_CODE (type) == OFFSET_TYPE) | |
4952 | { | |
4953 | PROMOTE_MODE (mode, unsignedp, type); | |
4954 | } | |
4955 | #endif | |
4956 | ||
4957 | DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode); | |
4958 | } | |
6f086dfc RS |
4959 | else |
4960 | /* Scalar, returned in a register. */ | |
4961 | { | |
4962 | #ifdef FUNCTION_OUTGOING_VALUE | |
4963 | DECL_RTL (DECL_RESULT (subr)) | |
4964 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4965 | #else | |
4966 | DECL_RTL (DECL_RESULT (subr)) | |
4967 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4968 | #endif | |
4969 | ||
4970 | /* Mark this reg as the function's return value. */ | |
4971 | if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG) | |
4972 | { | |
4973 | REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1; | |
4974 | /* Needed because we may need to move this to memory | |
4975 | in case it's a named return value whose address is taken. */ | |
a82ad570 | 4976 | DECL_REGISTER (DECL_RESULT (subr)) = 1; |
6f086dfc RS |
4977 | } |
4978 | } | |
4979 | ||
4980 | /* Initialize rtx for parameters and local variables. | |
4981 | In some cases this requires emitting insns. */ | |
4982 | ||
4983 | assign_parms (subr, 0); | |
4984 | ||
f0c51a1e RK |
4985 | #ifdef SMALL_REGISTER_CLASSES |
4986 | /* Copy the static chain now if it wasn't a register. The delay is to | |
4987 | avoid conflicts with the parameter passing registers. */ | |
4988 | ||
4989 | if (current_function_needs_context) | |
4990 | if (GET_CODE (static_chain_incoming_rtx) != REG) | |
4991 | emit_move_insn (last_ptr, static_chain_incoming_rtx); | |
4992 | #endif | |
4993 | ||
6f086dfc RS |
4994 | /* The following was moved from init_function_start. |
4995 | The move is supposed to make sdb output more accurate. */ | |
4996 | /* Indicate the beginning of the function body, | |
4997 | as opposed to parm setup. */ | |
5f4f0e22 | 4998 | emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG); |
6f086dfc RS |
4999 | |
5000 | /* If doing stupid allocation, mark parms as born here. */ | |
5001 | ||
5002 | if (GET_CODE (get_last_insn ()) != NOTE) | |
5f4f0e22 | 5003 | emit_note (NULL_PTR, NOTE_INSN_DELETED); |
6f086dfc RS |
5004 | parm_birth_insn = get_last_insn (); |
5005 | ||
5006 | if (obey_regdecls) | |
5007 | { | |
5008 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++) | |
5009 | use_variable (regno_reg_rtx[i]); | |
5010 | ||
5011 | if (current_function_internal_arg_pointer != virtual_incoming_args_rtx) | |
5012 | use_variable (current_function_internal_arg_pointer); | |
5013 | } | |
5014 | ||
5015 | /* Fetch static chain values for containing functions. */ | |
5016 | tem = decl_function_context (current_function_decl); | |
3e2481e9 JW |
5017 | /* If not doing stupid register allocation, then start off with the static |
5018 | chain pointer in a pseudo register. Otherwise, we use the stack | |
5019 | address that was generated above. */ | |
5020 | if (tem && ! obey_regdecls) | |
6f086dfc RS |
5021 | last_ptr = copy_to_reg (static_chain_incoming_rtx); |
5022 | context_display = 0; | |
5023 | while (tem) | |
5024 | { | |
5025 | tree rtlexp = make_node (RTL_EXPR); | |
5026 | ||
5027 | RTL_EXPR_RTL (rtlexp) = last_ptr; | |
5028 | context_display = tree_cons (tem, rtlexp, context_display); | |
5029 | tem = decl_function_context (tem); | |
5030 | if (tem == 0) | |
5031 | break; | |
5032 | /* Chain thru stack frames, assuming pointer to next lexical frame | |
5033 | is found at the place we always store it. */ | |
5034 | #ifdef FRAME_GROWS_DOWNWARD | |
5035 | last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode)); | |
5036 | #endif | |
5037 | last_ptr = copy_to_reg (gen_rtx (MEM, Pmode, | |
5038 | memory_address (Pmode, last_ptr))); | |
e5884dac RK |
5039 | |
5040 | /* If we are not optimizing, ensure that we know that this | |
5041 | piece of context is live over the entire function. */ | |
5042 | if (! optimize) | |
5043 | save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr, | |
5044 | save_expr_regs); | |
6f086dfc RS |
5045 | } |
5046 | ||
5047 | /* After the display initializations is where the tail-recursion label | |
5048 | should go, if we end up needing one. Ensure we have a NOTE here | |
5049 | since some things (like trampolines) get placed before this. */ | |
5f4f0e22 | 5050 | tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED); |
6f086dfc RS |
5051 | |
5052 | /* Evaluate now the sizes of any types declared among the arguments. */ | |
5053 | for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem)) | |
4752d3bc | 5054 | expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0); |
6f086dfc RS |
5055 | |
5056 | /* Make sure there is a line number after the function entry setup code. */ | |
5057 | force_next_line_note (); | |
5058 | } | |
5059 | \f | |
5060 | /* Generate RTL for the end of the current function. | |
980697fd | 5061 | FILENAME and LINE are the current position in the source file. |
6f086dfc | 5062 | |
980697fd | 5063 | It is up to language-specific callers to do cleanups for parameters-- |
1be07046 | 5064 | or else supply 1 for END_BINDINGS and we will call expand_end_bindings. */ |
6f086dfc RS |
5065 | |
5066 | void | |
1be07046 | 5067 | expand_function_end (filename, line, end_bindings) |
6f086dfc RS |
5068 | char *filename; |
5069 | int line; | |
1be07046 | 5070 | int end_bindings; |
6f086dfc RS |
5071 | { |
5072 | register int i; | |
5073 | tree link; | |
5074 | ||
5075 | static rtx initial_trampoline; | |
5076 | ||
c20bf1f3 JB |
5077 | if (output_bytecode) |
5078 | { | |
5079 | bc_expand_function_end (); | |
5080 | return; | |
5081 | } | |
5082 | ||
6f086dfc RS |
5083 | #ifdef NON_SAVING_SETJMP |
5084 | /* Don't put any variables in registers if we call setjmp | |
5085 | on a machine that fails to restore the registers. */ | |
5086 | if (NON_SAVING_SETJMP && current_function_calls_setjmp) | |
5087 | { | |
5088 | setjmp_protect (DECL_INITIAL (current_function_decl)); | |
5089 | setjmp_protect_args (); | |
5090 | } | |
5091 | #endif | |
5092 | ||
5093 | /* Save the argument pointer if a save area was made for it. */ | |
5094 | if (arg_pointer_save_area) | |
5095 | { | |
5096 | rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx); | |
5097 | emit_insn_before (x, tail_recursion_reentry); | |
5098 | } | |
5099 | ||
5100 | /* Initialize any trampolines required by this function. */ | |
5101 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
5102 | { | |
5103 | tree function = TREE_PURPOSE (link); | |
5104 | rtx context = lookup_static_chain (function); | |
5105 | rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link)); | |
5106 | rtx seq; | |
5107 | ||
5108 | /* First make sure this compilation has a template for | |
5109 | initializing trampolines. */ | |
5110 | if (initial_trampoline == 0) | |
86f8eff3 RK |
5111 | { |
5112 | end_temporary_allocation (); | |
5113 | initial_trampoline | |
5114 | = gen_rtx (MEM, BLKmode, assemble_trampoline_template ()); | |
5115 | resume_temporary_allocation (); | |
5116 | } | |
6f086dfc RS |
5117 | |
5118 | /* Generate insns to initialize the trampoline. */ | |
5119 | start_sequence (); | |
5120 | tramp = change_address (initial_trampoline, BLKmode, | |
5121 | round_trampoline_addr (XEXP (tramp, 0))); | |
5f4f0e22 | 5122 | emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE), |
6f086dfc RS |
5123 | FUNCTION_BOUNDARY / BITS_PER_UNIT); |
5124 | INITIALIZE_TRAMPOLINE (XEXP (tramp, 0), | |
5125 | XEXP (DECL_RTL (function), 0), context); | |
5126 | seq = get_insns (); | |
5127 | end_sequence (); | |
5128 | ||
5129 | /* Put those insns at entry to the containing function (this one). */ | |
5130 | emit_insns_before (seq, tail_recursion_reentry); | |
5131 | } | |
6f086dfc RS |
5132 | |
5133 | #if 0 /* I think unused parms are legitimate enough. */ | |
5134 | /* Warn about unused parms. */ | |
5135 | if (warn_unused) | |
5136 | { | |
5137 | rtx decl; | |
5138 | ||
5139 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
5140 | decl; decl = TREE_CHAIN (decl)) | |
5141 | if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL) | |
5142 | warning_with_decl (decl, "unused parameter `%s'"); | |
5143 | } | |
5144 | #endif | |
5145 | ||
5146 | /* Delete handlers for nonlocal gotos if nothing uses them. */ | |
5147 | if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label) | |
5148 | delete_handlers (); | |
5149 | ||
5150 | /* End any sequences that failed to be closed due to syntax errors. */ | |
5151 | while (in_sequence_p ()) | |
5f4f0e22 | 5152 | end_sequence (); |
6f086dfc RS |
5153 | |
5154 | /* Outside function body, can't compute type's actual size | |
5155 | until next function's body starts. */ | |
5156 | immediate_size_expand--; | |
5157 | ||
5158 | /* If doing stupid register allocation, | |
5159 | mark register parms as dying here. */ | |
5160 | ||
5161 | if (obey_regdecls) | |
5162 | { | |
5163 | rtx tem; | |
5164 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++) | |
5165 | use_variable (regno_reg_rtx[i]); | |
5166 | ||
5167 | /* Likewise for the regs of all the SAVE_EXPRs in the function. */ | |
5168 | ||
5169 | for (tem = save_expr_regs; tem; tem = XEXP (tem, 1)) | |
5170 | { | |
5171 | use_variable (XEXP (tem, 0)); | |
5172 | use_variable_after (XEXP (tem, 0), parm_birth_insn); | |
5173 | } | |
5174 | ||
5175 | if (current_function_internal_arg_pointer != virtual_incoming_args_rtx) | |
5176 | use_variable (current_function_internal_arg_pointer); | |
5177 | } | |
5178 | ||
5179 | clear_pending_stack_adjust (); | |
5180 | do_pending_stack_adjust (); | |
5181 | ||
5182 | /* Mark the end of the function body. | |
5183 | If control reaches this insn, the function can drop through | |
5184 | without returning a value. */ | |
5f4f0e22 | 5185 | emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END); |
6f086dfc RS |
5186 | |
5187 | /* Output a linenumber for the end of the function. | |
5188 | SDB depends on this. */ | |
5189 | emit_line_note_force (filename, line); | |
5190 | ||
5191 | /* Output the label for the actual return from the function, | |
5192 | if one is expected. This happens either because a function epilogue | |
5193 | is used instead of a return instruction, or because a return was done | |
5194 | with a goto in order to run local cleanups, or because of pcc-style | |
5195 | structure returning. */ | |
5196 | ||
5197 | if (return_label) | |
5198 | emit_label (return_label); | |
5199 | ||
1be07046 RS |
5200 | /* C++ uses this. */ |
5201 | if (end_bindings) | |
5202 | expand_end_bindings (0, 0, 0); | |
5203 | ||
6f086dfc RS |
5204 | /* If we had calls to alloca, and this machine needs |
5205 | an accurate stack pointer to exit the function, | |
5206 | insert some code to save and restore the stack pointer. */ | |
5207 | #ifdef EXIT_IGNORE_STACK | |
5208 | if (! EXIT_IGNORE_STACK) | |
5209 | #endif | |
5210 | if (current_function_calls_alloca) | |
5211 | { | |
59257ff7 RK |
5212 | rtx tem = 0; |
5213 | ||
5214 | emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn); | |
5f4f0e22 | 5215 | emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX); |
6f086dfc RS |
5216 | } |
5217 | ||
5218 | /* If scalar return value was computed in a pseudo-reg, | |
5219 | copy that to the hard return register. */ | |
5220 | if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0 | |
5221 | && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG | |
5222 | && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl))) | |
5223 | >= FIRST_PSEUDO_REGISTER)) | |
5224 | { | |
5225 | rtx real_decl_result; | |
5226 | ||
5227 | #ifdef FUNCTION_OUTGOING_VALUE | |
5228 | real_decl_result | |
5229 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
5230 | current_function_decl); | |
5231 | #else | |
5232 | real_decl_result | |
5233 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
5234 | current_function_decl); | |
5235 | #endif | |
5236 | REG_FUNCTION_VALUE_P (real_decl_result) = 1; | |
5237 | emit_move_insn (real_decl_result, | |
5238 | DECL_RTL (DECL_RESULT (current_function_decl))); | |
5239 | emit_insn (gen_rtx (USE, VOIDmode, real_decl_result)); | |
5240 | } | |
5241 | ||
5242 | /* If returning a structure, arrange to return the address of the value | |
5243 | in a place where debuggers expect to find it. | |
5244 | ||
5245 | If returning a structure PCC style, | |
5246 | the caller also depends on this value. | |
5247 | And current_function_returns_pcc_struct is not necessarily set. */ | |
5248 | if (current_function_returns_struct | |
5249 | || current_function_returns_pcc_struct) | |
5250 | { | |
5251 | rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
5252 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); | |
5253 | #ifdef FUNCTION_OUTGOING_VALUE | |
5254 | rtx outgoing | |
5255 | = FUNCTION_OUTGOING_VALUE (build_pointer_type (type), | |
5256 | current_function_decl); | |
5257 | #else | |
5258 | rtx outgoing | |
5259 | = FUNCTION_VALUE (build_pointer_type (type), | |
5260 | current_function_decl); | |
5261 | #endif | |
5262 | ||
5263 | /* Mark this as a function return value so integrate will delete the | |
5264 | assignment and USE below when inlining this function. */ | |
5265 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
5266 | ||
5267 | emit_move_insn (outgoing, value_address); | |
5268 | use_variable (outgoing); | |
5269 | } | |
5270 | ||
5271 | /* Output a return insn if we are using one. | |
5272 | Otherwise, let the rtl chain end here, to drop through | |
5273 | into the epilogue. */ | |
5274 | ||
5275 | #ifdef HAVE_return | |
5276 | if (HAVE_return) | |
5277 | { | |
5278 | emit_jump_insn (gen_return ()); | |
5279 | emit_barrier (); | |
5280 | } | |
5281 | #endif | |
5282 | ||
5283 | /* Fix up any gotos that jumped out to the outermost | |
5284 | binding level of the function. | |
5285 | Must follow emitting RETURN_LABEL. */ | |
5286 | ||
5287 | /* If you have any cleanups to do at this point, | |
5288 | and they need to create temporary variables, | |
5289 | then you will lose. */ | |
5f4f0e22 | 5290 | fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0); |
6f086dfc | 5291 | } |
bdac5f58 TW |
5292 | \f |
5293 | /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */ | |
5294 | ||
5295 | static int *prologue; | |
5296 | static int *epilogue; | |
5297 | ||
5298 | /* Create an array that records the INSN_UIDs of INSNS (either a sequence | |
5299 | or a single insn). */ | |
5300 | ||
5301 | static int * | |
5302 | record_insns (insns) | |
5303 | rtx insns; | |
5304 | { | |
5305 | int *vec; | |
5306 | ||
5307 | if (GET_CODE (insns) == SEQUENCE) | |
5308 | { | |
5309 | int len = XVECLEN (insns, 0); | |
5310 | vec = (int *) oballoc ((len + 1) * sizeof (int)); | |
5311 | vec[len] = 0; | |
5312 | while (--len >= 0) | |
5313 | vec[len] = INSN_UID (XVECEXP (insns, 0, len)); | |
5314 | } | |
5315 | else | |
5316 | { | |
5317 | vec = (int *) oballoc (2 * sizeof (int)); | |
5318 | vec[0] = INSN_UID (insns); | |
5319 | vec[1] = 0; | |
5320 | } | |
5321 | return vec; | |
5322 | } | |
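/* E.g. (illustrative): for a SEQUENCE of three insns with UIDs 10, 11
   and 12, the recorded vector is { 10, 11, 12, 0 }; for a single insn
   with UID 7 it is { 7, 0 }. The trailing zero terminates the scan in
   `contains', below. */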
5323 | ||
10914065 | 5324 | /* Determine how many INSN_UIDs in VEC are part of INSN. */ |
bdac5f58 | 5325 | |
10914065 | 5326 | static int |
bdac5f58 TW |
5327 | contains (insn, vec) |
5328 | rtx insn; | |
5329 | int *vec; | |
5330 | { | |
5331 | register int i, j; | |
5332 | ||
5333 | if (GET_CODE (insn) == INSN | |
5334 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
5335 | { | |
10914065 | 5336 | int count = 0; |
bdac5f58 TW |
5337 | for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) |
5338 | for (j = 0; vec[j]; j++) | |
5339 | if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j]) | |
10914065 TW |
5340 | count++; |
5341 | return count; | |
bdac5f58 TW |
5342 | } |
5343 | else | |
5344 | { | |
5345 | for (j = 0; vec[j]; j++) | |
5346 | if (INSN_UID (insn) == vec[j]) | |
10914065 | 5347 | return 1; |
bdac5f58 TW |
5348 | } |
5349 | return 0; | |
5350 | } | |
5351 | ||
5352 | /* Generate the prologue and epilogue RTL if the machine supports it. Thread | |
5353 | this into place with notes indicating where the prologue ends and where | |
5354 | the epilogue begins. Update the basic block information when possible. */ | |
5355 | ||
5356 | void | |
5357 | thread_prologue_and_epilogue_insns (f) | |
5358 | rtx f; | |
5359 | { | |
5360 | #ifdef HAVE_prologue | |
5361 | if (HAVE_prologue) | |
5362 | { | |
5363 | rtx head, seq, insn; | |
5364 | ||
5365 | /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more | |
5366 | prologue insns and a NOTE_INSN_PROLOGUE_END. */ | |
5367 | emit_note_after (NOTE_INSN_PROLOGUE_END, f); | |
5368 | seq = gen_prologue (); | |
5369 | head = emit_insn_after (seq, f); | |
5370 | ||
5371 | /* Include the new prologue insns in the first block. Ignore them | |
5372 | if they form a basic block unto themselves. */ | |
5373 | if (basic_block_head && n_basic_blocks | |
5374 | && GET_CODE (basic_block_head[0]) != CODE_LABEL) | |
5375 | basic_block_head[0] = NEXT_INSN (f); | |
5376 | ||
5377 | /* Retain a map of the prologue insns. */ | |
5378 | prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head); | |
5379 | } | |
5380 | else | |
5381 | #endif | |
5382 | prologue = 0; | |
5383 | ||
5384 | #ifdef HAVE_epilogue | |
5385 | if (HAVE_epilogue) | |
5386 | { | |
5387 | rtx insn = get_last_insn (); | |
5388 | rtx prev = prev_nonnote_insn (insn); | |
5389 | ||
5390 | /* If we end with a BARRIER, we don't need an epilogue. */ | |
5391 | if (! (prev && GET_CODE (prev) == BARRIER)) | |
5392 | { | |
a78bdb38 JW |
5393 | rtx tail, seq, tem; |
5394 | rtx first_use = 0; | |
5395 | rtx last_use = 0; | |
bdac5f58 | 5396 | |
a78bdb38 JW |
5397 | /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the |
5398 | epilogue insns, the USE insns at the end of a function, | |
5399 | the jump insn that returns, and then a BARRIER. */ | |
bdac5f58 | 5400 | |
a78bdb38 | 5401 | /* Move the USE insns at the end of a function onto a list. */ |
bdac5f58 TW |
5402 | while (prev |
5403 | && GET_CODE (prev) == INSN | |
5404 | && GET_CODE (PATTERN (prev)) == USE) | |
5405 | { | |
a78bdb38 | 5406 | tem = prev; |
bdac5f58 | 5407 | prev = prev_nonnote_insn (prev); |
a78bdb38 JW |
5408 | |
5409 | NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem); | |
5410 | PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem); | |
83eb3b0d RK |
5411 | if (first_use) |
5412 | { | |
5413 | NEXT_INSN (tem) = first_use; | |
5414 | PREV_INSN (first_use) = tem; | |
5415 | } | |
5416 | first_use = tem; | |
5417 | if (!last_use) | |
a78bdb38 | 5418 | last_use = tem; |
bdac5f58 TW |
5419 | } |
5420 | ||
a78bdb38 JW |
5421 | emit_barrier_after (insn); |
5422 | ||
bdac5f58 TW |
5423 | seq = gen_epilogue (); |
5424 | tail = emit_jump_insn_after (seq, insn); | |
a78bdb38 JW |
5425 | |
5426 | /* Insert the USE insns immediately before the return insn, which | |
5427 | must be the first instruction before the final barrier. */ | |
5428 | if (first_use) | |
5429 | { | |
5430 | tem = prev_nonnote_insn (get_last_insn ()); | |
5431 | NEXT_INSN (PREV_INSN (tem)) = first_use; | |
5432 | PREV_INSN (first_use) = PREV_INSN (tem); | |
5433 | PREV_INSN (tem) = last_use; | |
5434 | NEXT_INSN (last_use) = tem; | |
5435 | } | |
5436 | ||
bdac5f58 TW |
5437 | emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn); |
5438 | ||
5439 | /* Include the new epilogue insns in the last block. Ignore | |
5440 | them if they form a basic block unto themselves. */ | |
5441 | if (basic_block_end && n_basic_blocks | |
5442 | && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN) | |
5443 | basic_block_end[n_basic_blocks - 1] = tail; | |
5444 | ||
5445 | /* Retain a map of the epilogue insns. */ | |
5446 | epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail); | |
5447 | return; | |
5448 | } | |
5449 | } | |
5450 | #endif | |
5451 | epilogue = 0; | |
5452 | } | |
5453 | ||
5454 | /* Reposition the prologue-end and epilogue-begin notes after instruction | |
5455 | scheduling and delayed branch scheduling. */ | |
5456 | ||
5457 | void | |
5458 | reposition_prologue_and_epilogue_notes (f) | |
5459 | rtx f; | |
5460 | { | |
5461 | #if defined (HAVE_prologue) || defined (HAVE_epilogue) | |
5462 | /* Reposition the prologue and epilogue notes. */ | |
5463 | if (n_basic_blocks) | |
5464 | { | |
5465 | rtx next, prev; | |
bf526252 | 5466 | int len; |
bdac5f58 TW |
5467 | |
5468 | if (prologue) | |
5469 | { | |
bf526252 RK |
5470 | register rtx insn, note = 0; |
5471 | ||
5472 | /* Scan from the beginning until we reach the last prologue insn. | |
5473 | We apparently can't depend on basic_block_{head,end} after | |
5474 | reorg has run. */ | |
5475 | for (len = 0; prologue[len]; len++) | |
5476 | ; | |
9392c110 JH |
5477 | for (insn = f; len && insn; insn = NEXT_INSN (insn)) |
5478 | { | |
5479 | if (GET_CODE (insn) == NOTE) | |
5480 | { | |
5481 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END) | |
5482 | note = insn; | |
5483 | } | |
5484 | else if ((len -= contains (insn, prologue)) == 0) | |
5485 | { | |
5486 | /* Find the prologue-end note if we haven't already, and | |
5487 | move it to just after the last prologue insn. */ | |
5488 | if (note == 0) | |
5489 | { | |
5490 | for (note = insn; note = NEXT_INSN (note);) | |
5491 | if (GET_CODE (note) == NOTE | |
5492 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END) | |
5493 | break; | |
5494 | } | |
5495 | next = NEXT_INSN (note); | |
5496 | prev = PREV_INSN (note); | |
5497 | if (prev) | |
5498 | NEXT_INSN (prev) = next; | |
5499 | if (next) | |
5500 | PREV_INSN (next) = prev; | |
5501 | add_insn_after (note, insn); | |
5502 | } | |
5503 | } | |
bdac5f58 TW |
5504 | } |
5505 | ||
5506 | if (epilogue) | |
5507 | { | |
bf526252 RK |
5508 | register rtx insn, note = 0; |
5509 | ||
5510 | /* Scan from the end until we reach the first epilogue insn. | |
5511 | We apparently can't depend on basic_block_{head,end} after | |
5512 | reorg has run. */ | |
5513 | for (len = 0; epilogue[len]; len++) | |
5514 | ; | |
9392c110 JH |
5515 | for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn)) |
5516 | { | |
5517 | if (GET_CODE (insn) == NOTE) | |
5518 | { | |
5519 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG) | |
5520 | note = insn; | |
5521 | } | |
5522 | else if ((len -= contains (insn, epilogue)) == 0) | |
5523 | { | |
5524 | /* Find the epilogue-begin note if we haven't already, and | |
5525 | move it to just before the first epilogue insn. */ | |
5526 | if (note == 0) | |
5527 | { | |
5528 | for (note = insn; note = PREV_INSN (note);) | |
5529 | if (GET_CODE (note) == NOTE | |
5530 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG) | |
5531 | break; | |
5532 | } | |
5533 | next = NEXT_INSN (note); | |
5534 | prev = PREV_INSN (note); | |
5535 | if (prev) | |
5536 | NEXT_INSN (prev) = next; | |
5537 | if (next) | |
5538 | PREV_INSN (next) = prev; | |
5539 | add_insn_after (note, PREV_INSN (insn)); | |
5540 | } | |
5541 | } | |
bdac5f58 TW |
5542 | } |
5543 | } | |
5544 | #endif /* HAVE_prologue or HAVE_epilogue */ | |
5545 | } |