1 | /* Expands front end tree to back end RTL for GNU C-Compiler | |
2 | Copyright (C) 1987, 88, 89, 91, 92, 1993 Free Software Foundation, Inc. | |
3 | ||
4 | This file is part of GNU CC. | |
5 | ||
6 | GNU CC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GNU CC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GNU CC; see the file COPYING. If not, write to | |
18 | the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ | |
19 | ||
20 | ||
21 | /* This file handles the generation of rtl code from tree structure | |
22 | at the level of the function as a whole. | |
23 | It creates the rtl expressions for parameters and auto variables | |
24 | and has full responsibility for allocating stack slots. | |
25 | ||
26 | `expand_function_start' is called at the beginning of a function, | |
27 | before the function body is parsed, and `expand_function_end' is | |
28 | called after parsing the body. | |
29 | ||
30 | Call `assign_stack_local' to allocate a stack slot for a local variable. | |
31 | This is usually done during the RTL generation for the function body, | |
32 | but it can also be done in the reload pass when a pseudo-register does | |
33 | not get a hard register. | |
34 | ||
35 | Call `put_var_into_stack' when you learn, belatedly, that a variable | |
36 | previously given a pseudo-register must in fact go in the stack. | |
37 | This function changes the DECL_RTL to be a stack slot instead of a reg | |
38 | then scans all the RTL instructions so far generated to correct them. */ | |
39 | ||
40 | #include "config.h" | |
41 | ||
42 | #include <stdio.h> | |
43 | ||
44 | #include "rtl.h" | |
45 | #include "tree.h" | |
46 | #include "flags.h" | |
47 | #include "function.h" | |
48 | #include "insn-flags.h" | |
49 | #include "expr.h" | |
50 | #include "insn-codes.h" | |
51 | #include "regs.h" | |
52 | #include "hard-reg-set.h" | |
53 | #include "insn-config.h" | |
54 | #include "recog.h" | |
55 | #include "output.h" | |
56 | #include "basic-block.h" | |
57 | ||
58 | /* Some systems use __main in a way incompatible with its use in gcc; in these | |
59 | cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to | |
60 | give the same symbol without quotes for an alternative entry point. You | |
61 | must define both, or neither. */ | |
62 | #ifndef NAME__MAIN | |
63 | #define NAME__MAIN "__main" | |
64 | #define SYMBOL__MAIN __main | |
65 | #endif | |
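/* Illustrative sketch (not part of the original source): a target whose
   runtime cannot use the plain "__main" symbol would override both
   macros together in its machine description header, for example

       #define NAME__MAIN "__alt_main"
       #define SYMBOL__MAIN __alt_main

   where "__alt_main" is a hypothetical name; the only requirement is
   that the quoted and unquoted forms denote the same entry point.  */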
66 | ||
67 | /* Round a value down to the largest multiple of the required alignment | |
68 | that does not exceed it. Avoid using division in case the value is | |
69 | negative. Assume the alignment is a power of two. */ | |
70 | #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1)) | |
71 | ||
72 | /* Similar, but round up to the next multiple of the required | |
73 | alignment. */ | |
74 | #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1)) | |
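/* Worked example (illustrative only, assuming an 8-byte alignment):
   CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, while
   FLOOR_ROUND (-13, 8) == -13 & ~7 == -16, so a downward-growing frame
   offset still rounds toward more negative values without relying on
   the implementation-defined behavior of negative division.  */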
75 | ||
76 | /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp | |
77 | during rtl generation. If they are different register numbers, this is | |
78 | always true. It may also be true if | |
79 | FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl | |
80 | generation. See fix_lexical_addr for details. */ | |
81 | ||
82 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
83 | #define NEED_SEPARATE_AP | |
84 | #endif | |
85 | ||
86 | /* Number of bytes of args popped by function being compiled on its return. | |
87 | Zero if no bytes are to be popped. | |
88 | May affect compilation of return insn or of function epilogue. */ | |
89 | ||
90 | int current_function_pops_args; | |
91 | ||
92 | /* Nonzero if function being compiled needs to be given an address | |
93 | where the value should be stored. */ | |
94 | ||
95 | int current_function_returns_struct; | |
96 | ||
97 | /* Nonzero if function being compiled needs to | |
98 | return the address of where it has put a structure value. */ | |
99 | ||
100 | int current_function_returns_pcc_struct; | |
101 | ||
102 | /* Nonzero if function being compiled needs to be passed a static chain. */ | |
103 | ||
104 | int current_function_needs_context; | |
105 | ||
106 | /* Nonzero if function being compiled can call setjmp. */ | |
107 | ||
108 | int current_function_calls_setjmp; | |
109 | ||
110 | /* Nonzero if function being compiled can call longjmp. */ | |
111 | ||
112 | int current_function_calls_longjmp; | |
113 | ||
114 | /* Nonzero if function being compiled receives nonlocal gotos | |
115 | from nested functions. */ | |
116 | ||
117 | int current_function_has_nonlocal_label; | |
118 | ||
119 | /* Nonzero if function being compiled contains nested functions. */ | |
120 | ||
121 | int current_function_contains_functions; | |
122 | ||
123 | /* Nonzero if function being compiled can call alloca, | |
124 | either as a subroutine or builtin. */ | |
125 | ||
126 | int current_function_calls_alloca; | |
127 | ||
128 | /* Nonzero if the current function returns a pointer type */ | |
129 | ||
130 | int current_function_returns_pointer; | |
131 | ||
132 | /* If some insns can be deferred to the delay slots of the epilogue, the | |
133 | delay list for them is recorded here. */ | |
134 | ||
135 | rtx current_function_epilogue_delay_list; | |
136 | ||
137 | /* If function's args have a fixed size, this is that size, in bytes. | |
138 | Otherwise, it is -1. | |
139 | May affect compilation of return insn or of function epilogue. */ | |
140 | ||
141 | int current_function_args_size; | |
142 | ||
143 | /* # bytes the prologue should push and pretend that the caller pushed them. | |
144 | The prologue must do this, but only if parms can be passed in registers. */ | |
145 | ||
146 | int current_function_pretend_args_size; | |
147 | ||
148 | /* # of bytes of outgoing arguments required to be pushed by the prologue. | |
149 | If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined | |
150 | and no stack adjusts will be done on function calls. */ | |
151 | ||
152 | int current_function_outgoing_args_size; | |
153 | ||
154 | /* This is the offset from the arg pointer to the place where the first | |
155 | anonymous arg can be found, if there is one. */ | |
156 | ||
157 | rtx current_function_arg_offset_rtx; | |
158 | ||
159 | /* Nonzero if current function uses varargs.h or equivalent. | |
160 | Zero for functions that use stdarg.h. */ | |
161 | ||
162 | int current_function_varargs; | |
163 | ||
164 | /* Quantities of various kinds of registers | |
165 | used for the current function's args. */ | |
166 | ||
167 | CUMULATIVE_ARGS current_function_args_info; | |
168 | ||
169 | /* Name of function now being compiled. */ | |
170 | ||
171 | char *current_function_name; | |
172 | ||
173 | /* If non-zero, an RTL expression for the location at which the current | |
174 | function returns its result. Always equal to | |
175 | DECL_RTL (DECL_RESULT (current_function_decl)), but provided | |
176 | independently of the tree structures. */ | |
177 | ||
178 | rtx current_function_return_rtx; | |
179 | ||
180 | /* Nonzero if the current function uses the constant pool. */ | |
181 | ||
182 | int current_function_uses_const_pool; | |
183 | ||
184 | /* Nonzero if the current function uses pic_offset_table_rtx. */ | |
185 | int current_function_uses_pic_offset_table; | |
186 | ||
187 | /* The arg pointer hard register, or the pseudo into which it was copied. */ | |
188 | rtx current_function_internal_arg_pointer; | |
189 | ||
190 | /* The FUNCTION_DECL for an inline function currently being expanded. */ | |
191 | tree inline_function_decl; | |
192 | ||
193 | /* Number of function calls seen so far in current function. */ | |
194 | ||
195 | int function_call_count; | |
196 | ||
197 | /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels | |
198 | (labels to which there can be nonlocal gotos from nested functions) | |
199 | in this function. */ | |
200 | ||
201 | tree nonlocal_labels; | |
202 | ||
203 | /* RTX for stack slot that holds the current handler for nonlocal gotos. | |
204 | Zero when function does not have nonlocal labels. */ | |
205 | ||
206 | rtx nonlocal_goto_handler_slot; | |
207 | ||
208 | /* RTX for stack slot that holds the stack pointer value to restore | |
209 | for a nonlocal goto. | |
210 | Zero when function does not have nonlocal labels. */ | |
211 | ||
212 | rtx nonlocal_goto_stack_level; | |
213 | ||
214 | /* Label that will go on parm cleanup code, if any. | |
215 | Jumping to this label runs cleanup code for parameters, if | |
216 | such code must be run. Following this code is the logical return label. */ | |
217 | ||
218 | rtx cleanup_label; | |
219 | ||
220 | /* Label that will go on function epilogue. | |
221 | Jumping to this label serves as a "return" instruction | |
222 | on machines which require execution of the epilogue on all returns. */ | |
223 | ||
224 | rtx return_label; | |
225 | ||
226 | /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs. | |
227 | So we can mark them all live at the end of the function, if nonopt. */ | |
228 | rtx save_expr_regs; | |
229 | ||
230 | /* List (chain of EXPR_LISTs) of all stack slots in this function. | |
231 | Made for the sake of unshare_all_rtl. */ | |
232 | rtx stack_slot_list; | |
233 | ||
234 | /* Chain of all RTL_EXPRs that have insns in them. */ | |
235 | tree rtl_expr_chain; | |
236 | ||
237 | /* Label to jump back to for tail recursion, or 0 if we have | |
238 | not yet needed one for this function. */ | |
239 | rtx tail_recursion_label; | |
240 | ||
241 | /* Place after which to insert the tail_recursion_label if we need one. */ | |
242 | rtx tail_recursion_reentry; | |
243 | ||
244 | /* Location at which to save the argument pointer if it will need to be | |
245 | referenced. There are two cases where this is done: if nonlocal gotos | |
246 | exist, or if vars stored at an offset from the argument pointer will be | |
247 | needed by inner routines. */ | |
248 | ||
249 | rtx arg_pointer_save_area; | |
250 | ||
251 | /* Offset to end of allocated area of stack frame. | |
252 | If stack grows down, this is the address of the last stack slot allocated. | |
253 | If stack grows up, this is the address for the next slot. */ | |
254 | int frame_offset; | |
255 | ||
256 | /* List (chain of TREE_LISTs) of static chains for containing functions. | |
257 | Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx | |
258 | in an RTL_EXPR in the TREE_VALUE. */ | |
259 | static tree context_display; | |
260 | ||
261 | /* List (chain of TREE_LISTs) of trampolines for nested functions. | |
262 | The trampoline sets up the static chain and jumps to the function. | |
263 | We supply the trampoline's address when the function's address is requested. | |
264 | ||
265 | Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx | |
266 | in an RTL_EXPR in the TREE_VALUE. */ | |
267 | static tree trampoline_list; | |
268 | ||
269 | /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */ | |
270 | static rtx parm_birth_insn; | |
271 | ||
272 | #if 0 | |
273 | /* Nonzero if a stack slot has been generated whose address is not | |
274 | actually valid. It means that the generated rtl must all be scanned | |
275 | to detect and correct the invalid addresses where they occur. */ | |
276 | static int invalid_stack_slot; | |
277 | #endif | |
278 | ||
279 | /* Last insn of those whose job was to put parms into their nominal homes. */ | |
280 | static rtx last_parm_insn; | |
281 | ||
282 | /* 1 + last pseudo register number used for loading a copy | |
283 | of a parameter of this function. */ | |
284 | static int max_parm_reg; | |
285 | ||
286 | /* Vector indexed by REGNO, containing location on stack in which | |
287 | to put the parm which is nominally in pseudo register REGNO, | |
288 | if we discover that that parm must go in the stack. */ | |
289 | static rtx *parm_reg_stack_loc; | |
290 | ||
291 | #if 0 /* Turned off because 0 seems to work just as well. */ | |
292 | /* Cleanup lists are required for binding levels regardless of whether | |
293 | that binding level has cleanups or not. This node serves as the | |
294 | cleanup list whenever an empty list is required. */ | |
295 | static tree empty_cleanup_list; | |
296 | #endif | |
297 | ||
298 | /* Nonzero once virtual register instantiation has been done. | |
299 | assign_stack_local uses frame_pointer_rtx when this is nonzero. */ | |
300 | static int virtuals_instantiated; | |
301 | ||
302 | /* Nonzero if we need to distinguish between the return value of this function | |
303 | and the return value of a function called by this function. This helps | |
304 | integrate.c. */ | |
305 | ||
306 | extern int rtx_equal_function_value_matters; | |
307 | ||
308 | void fixup_gotos (); | |
309 | ||
310 | static tree round_down (); | |
311 | static rtx round_trampoline_addr (); | |
312 | static rtx fixup_stack_1 (); | |
313 | static void put_reg_into_stack (); | |
314 | static void fixup_var_refs (); | |
315 | static void fixup_var_refs_insns (); | |
316 | static void fixup_var_refs_1 (); | |
317 | static void optimize_bit_field (); | |
318 | static void instantiate_decls (); | |
319 | static void instantiate_decls_1 (); | |
320 | static void instantiate_decl (); | |
321 | static int instantiate_virtual_regs_1 (); | |
322 | static rtx fixup_memory_subreg (); | |
323 | static rtx walk_fixup_memory_subreg (); | |
324 | \f | |
325 | /* In order to evaluate some expressions, such as function calls returning | |
326 | structures in memory, we need to temporarily allocate stack locations. | |
327 | We record each allocated temporary in the following structure. | |
328 | ||
329 | Associated with each temporary slot is a nesting level. When we pop up | |
330 | one level, all temporaries associated with the previous level are freed. | |
331 | Normally, all temporaries are freed after the execution of the statement | |
332 | in which they were created. However, if we are inside a ({...}) grouping, | |
333 | the result may be in a temporary and hence must be preserved. If the | |
334 | result could be in a temporary, we preserve it if we can determine which | |
335 | one it is in. If we cannot determine which temporary may contain the | |
336 | result, all temporaries are preserved. A temporary is preserved by | |
337 | pretending it was allocated at the previous nesting level. | |
338 | ||
339 | Automatic variables are also assigned temporary slots, at the nesting | |
340 | level where they are defined. They are marked as "kept" so that | |
341 | free_temp_slots will not free them. */ | |
342 | ||
343 | struct temp_slot | |
344 | { | |
345 | /* Points to next temporary slot. */ | |
346 | struct temp_slot *next; | |
347 | /* The rtx used to reference the slot. */ | |
348 | rtx slot; | |
349 | /* The size, in units, of the slot. */ | |
350 | int size; | |
351 | /* Non-zero if this temporary is currently in use. */ | |
352 | char in_use; | |
353 | /* Nesting level at which this slot is being used. */ | |
354 | int level; | |
355 | /* Non-zero if this should survive a call to free_temp_slots. */ | |
356 | int keep; | |
357 | }; | |
358 | ||
359 | /* List of all temporaries allocated, both available and in use. */ | |
360 | ||
361 | struct temp_slot *temp_slots; | |
362 | ||
363 | /* Current nesting level for temporaries. */ | |
364 | ||
365 | int temp_slot_level; | |
366 | \f | |
367 | /* Pointer to chain of `struct function' for containing functions. */ | |
368 | struct function *outer_function_chain; | |
369 | ||
370 | /* Given a function decl for a containing function, | |
371 | return the `struct function' for it. */ | |
372 | ||
373 | struct function * | |
374 | find_function_data (decl) | |
375 | tree decl; | |
376 | { | |
377 | struct function *p; | |
378 | for (p = outer_function_chain; p; p = p->next) | |
379 | if (p->decl == decl) | |
380 | return p; | |
381 | abort (); | |
382 | } | |
383 | ||
384 | /* Save the current context for compilation of a nested function. | |
385 | This is called from language-specific code. | |
386 | The caller is responsible for saving any language-specific status, | |
387 | since this function knows only about language-independent variables. */ | |
388 | ||
389 | void | |
390 | push_function_context () | |
391 | { | |
392 | struct function *p = (struct function *) xmalloc (sizeof (struct function)); | |
393 | ||
394 | p->next = outer_function_chain; | |
395 | outer_function_chain = p; | |
396 | ||
397 | p->name = current_function_name; | |
398 | p->decl = current_function_decl; | |
399 | p->pops_args = current_function_pops_args; | |
400 | p->returns_struct = current_function_returns_struct; | |
401 | p->returns_pcc_struct = current_function_returns_pcc_struct; | |
402 | p->needs_context = current_function_needs_context; | |
403 | p->calls_setjmp = current_function_calls_setjmp; | |
404 | p->calls_longjmp = current_function_calls_longjmp; | |
405 | p->calls_alloca = current_function_calls_alloca; | |
406 | p->has_nonlocal_label = current_function_has_nonlocal_label; | |
407 | p->args_size = current_function_args_size; | |
408 | p->pretend_args_size = current_function_pretend_args_size; | |
409 | p->arg_offset_rtx = current_function_arg_offset_rtx; | |
410 | p->uses_const_pool = current_function_uses_const_pool; | |
411 | p->uses_pic_offset_table = current_function_uses_pic_offset_table; | |
412 | p->internal_arg_pointer = current_function_internal_arg_pointer; | |
413 | p->max_parm_reg = max_parm_reg; | |
414 | p->parm_reg_stack_loc = parm_reg_stack_loc; | |
415 | p->outgoing_args_size = current_function_outgoing_args_size; | |
416 | p->return_rtx = current_function_return_rtx; | |
417 | p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot; | |
418 | p->nonlocal_goto_stack_level = nonlocal_goto_stack_level; | |
419 | p->nonlocal_labels = nonlocal_labels; | |
420 | p->cleanup_label = cleanup_label; | |
421 | p->return_label = return_label; | |
422 | p->save_expr_regs = save_expr_regs; | |
423 | p->stack_slot_list = stack_slot_list; | |
424 | p->parm_birth_insn = parm_birth_insn; | |
425 | p->frame_offset = frame_offset; | |
426 | p->tail_recursion_label = tail_recursion_label; | |
427 | p->tail_recursion_reentry = tail_recursion_reentry; | |
428 | p->arg_pointer_save_area = arg_pointer_save_area; | |
429 | p->rtl_expr_chain = rtl_expr_chain; | |
430 | p->last_parm_insn = last_parm_insn; | |
431 | p->context_display = context_display; | |
432 | p->trampoline_list = trampoline_list; | |
433 | p->function_call_count = function_call_count; | |
434 | p->temp_slots = temp_slots; | |
435 | p->temp_slot_level = temp_slot_level; | |
436 | p->fixup_var_refs_queue = 0; | |
437 | p->epilogue_delay_list = current_function_epilogue_delay_list; | |
438 | ||
439 | save_tree_status (p); | |
440 | save_storage_status (p); | |
441 | save_emit_status (p); | |
442 | init_emit (); | |
443 | save_expr_status (p); | |
444 | save_stmt_status (p); | |
445 | save_varasm_status (p); | |
446 | } | |
447 | ||
448 | /* Restore the last saved context, at the end of a nested function. | |
449 | This function is called from language-specific code. */ | |
450 | ||
451 | void | |
452 | pop_function_context () | |
453 | { | |
454 | struct function *p = outer_function_chain; | |
455 | ||
456 | outer_function_chain = p->next; | |
457 | ||
458 | current_function_name = p->name; | |
459 | current_function_decl = p->decl; | |
460 | current_function_pops_args = p->pops_args; | |
461 | current_function_returns_struct = p->returns_struct; | |
462 | current_function_returns_pcc_struct = p->returns_pcc_struct; | |
463 | current_function_needs_context = p->needs_context; | |
464 | current_function_calls_setjmp = p->calls_setjmp; | |
465 | current_function_calls_longjmp = p->calls_longjmp; | |
466 | current_function_calls_alloca = p->calls_alloca; | |
467 | current_function_has_nonlocal_label = p->has_nonlocal_label; | |
468 | current_function_contains_functions = 1; | |
469 | current_function_args_size = p->args_size; | |
470 | current_function_pretend_args_size = p->pretend_args_size; | |
471 | current_function_arg_offset_rtx = p->arg_offset_rtx; | |
472 | current_function_uses_const_pool = p->uses_const_pool; | |
473 | current_function_uses_pic_offset_table = p->uses_pic_offset_table; | |
474 | current_function_internal_arg_pointer = p->internal_arg_pointer; | |
475 | max_parm_reg = p->max_parm_reg; | |
476 | parm_reg_stack_loc = p->parm_reg_stack_loc; | |
477 | current_function_outgoing_args_size = p->outgoing_args_size; | |
478 | current_function_return_rtx = p->return_rtx; | |
479 | nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot; | |
480 | nonlocal_goto_stack_level = p->nonlocal_goto_stack_level; | |
481 | nonlocal_labels = p->nonlocal_labels; | |
482 | cleanup_label = p->cleanup_label; | |
483 | return_label = p->return_label; | |
484 | save_expr_regs = p->save_expr_regs; | |
485 | stack_slot_list = p->stack_slot_list; | |
486 | parm_birth_insn = p->parm_birth_insn; | |
487 | frame_offset = p->frame_offset; | |
488 | tail_recursion_label = p->tail_recursion_label; | |
489 | tail_recursion_reentry = p->tail_recursion_reentry; | |
490 | arg_pointer_save_area = p->arg_pointer_save_area; | |
491 | rtl_expr_chain = p->rtl_expr_chain; | |
492 | last_parm_insn = p->last_parm_insn; | |
493 | context_display = p->context_display; | |
494 | trampoline_list = p->trampoline_list; | |
495 | function_call_count = p->function_call_count; | |
496 | temp_slots = p->temp_slots; | |
497 | temp_slot_level = p->temp_slot_level; | |
498 | current_function_epilogue_delay_list = p->epilogue_delay_list; | |
499 | ||
500 | restore_tree_status (p); | |
501 | restore_storage_status (p); | |
502 | restore_expr_status (p); | |
503 | restore_emit_status (p); | |
504 | restore_stmt_status (p); | |
505 | restore_varasm_status (p); | |
506 | ||
507 | /* Finish doing put_var_into_stack for any of our variables | |
508 | which became addressable during the nested function. */ | |
509 | { | |
510 | struct var_refs_queue *queue = p->fixup_var_refs_queue; | |
511 | for (; queue; queue = queue->next) | |
512 | fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp); | |
513 | } | |
514 | ||
515 | free (p); | |
516 | ||
517 | /* Reset variables that have known state during rtx generation. */ | |
518 | rtx_equal_function_value_matters = 1; | |
519 | virtuals_instantiated = 0; | |
520 | } | |
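/* Minimal usage sketch (illustrative only; expand_nested_function is a
   hypothetical placeholder for the language-specific work):

       push_function_context ();
       expand_nested_function (fndecl);
       pop_function_context ();

   The front end must also save and restore its own language-specific
   status, since these two routines handle only the language-independent
   variables listed above.  */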
521 | \f | |
522 | /* Allocate fixed slots in the stack frame of the current function. */ | |
523 | ||
524 | /* Return size needed for stack frame based on slots so far allocated. | |
525 | This size counts from zero. It is not rounded to STACK_BOUNDARY; | |
526 | the caller may have to do that. */ | |
527 | ||
528 | int | |
529 | get_frame_size () | |
530 | { | |
531 | #ifdef FRAME_GROWS_DOWNWARD | |
532 | return -frame_offset; | |
533 | #else | |
534 | return frame_offset; | |
535 | #endif | |
536 | } | |
537 | ||
538 | /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it | |
539 | with machine mode MODE. | |
540 | ||
541 | ALIGN controls the amount of alignment for the address of the slot: | |
542 | 0 means according to MODE, | |
543 | -1 means use BIGGEST_ALIGNMENT and round size to multiple of that, | |
544 | positive specifies alignment boundary in bits. | |
545 | ||
546 | We do not round to stack_boundary here. */ | |
547 | ||
548 | rtx | |
549 | assign_stack_local (mode, size, align) | |
550 | enum machine_mode mode; | |
551 | int size; | |
552 | int align; | |
553 | { | |
554 | register rtx x, addr; | |
555 | int bigend_correction = 0; | |
556 | int alignment; | |
557 | ||
558 | if (align == 0) | |
559 | { | |
560 | alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
561 | if (mode == BLKmode) | |
562 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
563 | } | |
564 | else if (align == -1) | |
565 | { | |
566 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
567 | size = CEIL_ROUND (size, alignment); | |
568 | } | |
569 | else | |
570 | alignment = align / BITS_PER_UNIT; | |
571 | ||
572 | /* Round frame offset to that alignment. | |
573 | We must be careful here, since FRAME_OFFSET might be negative and | |
574 | division with a negative dividend isn't as well defined as we might | |
575 | like. So we instead assume that ALIGNMENT is a power of two and | |
576 | use logical operations which are unambiguous. */ | |
577 | #ifdef FRAME_GROWS_DOWNWARD | |
578 | frame_offset = FLOOR_ROUND (frame_offset, alignment); | |
579 | #else | |
580 | frame_offset = CEIL_ROUND (frame_offset, alignment); | |
581 | #endif | |
582 | ||
583 | /* On a big-endian machine, if we are allocating more space than we will use, | |
584 | use the least significant bytes of those that are allocated. */ | |
585 | #if BYTES_BIG_ENDIAN | |
586 | if (mode != BLKmode) | |
587 | bigend_correction = size - GET_MODE_SIZE (mode); | |
588 | #endif | |
589 | ||
590 | #ifdef FRAME_GROWS_DOWNWARD | |
591 | frame_offset -= size; | |
592 | #endif | |
593 | ||
594 | /* If we have already instantiated virtual registers, return the actual | |
595 | address relative to the frame pointer. */ | |
596 | if (virtuals_instantiated) | |
597 | addr = plus_constant (frame_pointer_rtx, | |
598 | (frame_offset + bigend_correction | |
599 | + STARTING_FRAME_OFFSET)); | |
600 | else | |
601 | addr = plus_constant (virtual_stack_vars_rtx, | |
602 | frame_offset + bigend_correction); | |
603 | ||
604 | #ifndef FRAME_GROWS_DOWNWARD | |
605 | frame_offset += size; | |
606 | #endif | |
607 | ||
608 | x = gen_rtx (MEM, mode, addr); | |
609 | ||
610 | stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list); | |
611 | ||
612 | return x; | |
613 | } | |
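/* Usage sketch (illustrative only): a slot for a single SImode value
   with the default alignment for its mode could be requested as

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while a BLKmode area would normally pass an ALIGN of -1 so that both
   its size and its address are rounded to BIGGEST_ALIGNMENT.  */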
614 | ||
615 | /* Assign a stack slot in a containing function. | |
616 | First three arguments are same as in preceding function. | |
617 | The last argument specifies the function to allocate in. */ | |
618 | ||
619 | rtx | |
620 | assign_outer_stack_local (mode, size, align, function) | |
621 | enum machine_mode mode; | |
622 | int size; | |
623 | int align; | |
624 | struct function *function; | |
625 | { | |
626 | register rtx x, addr; | |
627 | int bigend_correction = 0; | |
628 | int alignment; | |
629 | ||
630 | /* Allocate in the memory associated with the function in whose frame | |
631 | we are assigning. */ | |
632 | push_obstacks (function->function_obstack, | |
633 | function->function_maybepermanent_obstack); | |
634 | ||
635 | if (align == 0) | |
636 | { | |
637 | alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
638 | if (mode == BLKmode) | |
639 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
640 | } | |
641 | else if (align == -1) | |
642 | { | |
643 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
644 | size = CEIL_ROUND (size, alignment); | |
645 | } | |
646 | else | |
647 | alignment = align / BITS_PER_UNIT; | |
648 | ||
649 | /* Round frame offset to that alignment. */ | |
650 | #ifdef FRAME_GROWS_DOWNWARD | |
651 | function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment); | |
652 | #else | |
653 | function->frame_offset = CEIL_ROUND (function->frame_offset, alignment); | |
654 | #endif | |
655 | ||
656 | /* On a big-endian machine, if we are allocating more space than we will use, | |
657 | use the least significant bytes of those that are allocated. */ | |
658 | #if BYTES_BIG_ENDIAN | |
659 | if (mode != BLKmode) | |
660 | bigend_correction = size - GET_MODE_SIZE (mode); | |
661 | #endif | |
662 | ||
663 | #ifdef FRAME_GROWS_DOWNWARD | |
664 | function->frame_offset -= size; | |
665 | #endif | |
666 | addr = plus_constant (virtual_stack_vars_rtx, | |
667 | function->frame_offset + bigend_correction); | |
668 | #ifndef FRAME_GROWS_DOWNWARD | |
669 | function->frame_offset += size; | |
670 | #endif | |
671 | ||
672 | x = gen_rtx (MEM, mode, addr); | |
673 | ||
674 | function->stack_slot_list | |
675 | = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list); | |
676 | ||
677 | pop_obstacks (); | |
678 | ||
679 | return x; | |
680 | } | |
681 | \f | |
682 | /* Allocate a temporary stack slot and record it for possible later | |
683 | reuse. | |
684 | ||
685 | MODE is the machine mode to be given to the returned rtx. | |
686 | ||
687 | SIZE is the size in units of the space required. We do no rounding here | |
688 | since assign_stack_local will do any required rounding. | |
689 | ||
690 | KEEP is non-zero if this slot is to be retained after a call to | |
691 | free_temp_slots. Automatic variables for a block are allocated with this | |
692 | flag. */ | |
693 | ||
694 | rtx | |
695 | assign_stack_temp (mode, size, keep) | |
696 | enum machine_mode mode; | |
697 | int size; | |
698 | int keep; | |
699 | { | |
700 | struct temp_slot *p, *best_p = 0; | |
701 | ||
702 | /* First try to find an available, already-allocated temporary that is the | |
703 | exact size we require. */ | |
704 | for (p = temp_slots; p; p = p->next) | |
705 | if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use) | |
706 | break; | |
707 | ||
708 | /* If we didn't find one, try one that is larger than what we want. We | |
709 | find the smallest such. */ | |
710 | if (p == 0) | |
711 | for (p = temp_slots; p; p = p->next) | |
712 | if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use | |
713 | && (best_p == 0 || best_p->size > p->size)) | |
714 | best_p = p; | |
715 | ||
716 | /* Make our best, if any, the one to use. */ | |
717 | if (best_p) | |
718 | { | |
719 | /* If there are enough aligned bytes left over, make them into a new | |
720 | temp_slot so that the extra bytes don't get wasted. Do this only | |
721 | for BLKmode slots, so that we can be sure of the alignment. */ | |
722 | if (GET_MODE (best_p->slot) == BLKmode) | |
723 | { | |
724 | int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
725 | int rounded_size = CEIL_ROUND (size, alignment); | |
726 | ||
727 | if (best_p->size - rounded_size >= alignment) | |
728 | { | |
729 | p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); | |
730 | p->in_use = 0; | |
731 | p->size = best_p->size - rounded_size; | |
732 | p->slot = gen_rtx (MEM, BLKmode, | |
733 | plus_constant (XEXP (best_p->slot, 0), | |
734 | rounded_size)); | |
735 | p->next = temp_slots; | |
736 | temp_slots = p; | |
737 | ||
738 | stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot, | |
739 | stack_slot_list); | |
740 | ||
741 | best_p->size = rounded_size; | |
742 | } | |
743 | } | |
744 | ||
745 | p = best_p; | |
746 | } | |
747 | ||
748 | ||
749 | /* If we still didn't find one, make a new temporary. */ | |
750 | if (p == 0) | |
751 | { | |
752 | p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); | |
753 | p->size = size; | |
754 | /* If the temp slot mode doesn't indicate the alignment, | |
755 | use the largest possible, so no one will be disappointed. */ | |
756 | p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0); | |
757 | p->next = temp_slots; | |
758 | temp_slots = p; | |
759 | } | |
760 | ||
761 | p->in_use = 1; | |
762 | p->level = temp_slot_level; | |
763 | p->keep = keep; | |
764 | return p->slot; | |
765 | } | |
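/* Usage sketch (illustrative only): expression-level code typically
   grabs a scratch slot and relies on the statement-level caller to
   release it:

       rtx temp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
       ... emit insns that store into and later read from temp ...
       free_temp_slots ();

   Passing a nonzero KEEP instead makes the slot survive that call, as
   is done for the automatic variables of a block.  */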
766 | ||
767 | /* Combine temporary stack slots which are adjacent on the stack. | |
768 | ||
769 | This allows for better use of already allocated stack space. This is only | |
770 | done for BLKmode slots because we can be sure that we won't have alignment | |
771 | problems in this case. */ | |
772 | ||
773 | void | |
774 | combine_temp_slots () | |
775 | { | |
776 | struct temp_slot *p, *q; | |
777 | struct temp_slot *prev_p, *prev_q; | |
778 | ||
779 | for (p = temp_slots, prev_p = 0; p; prev_p = p, p = p->next) | |
780 | if (! p->in_use && GET_MODE (p->slot) == BLKmode) | |
781 | for (q = p->next, prev_q = p; q; prev_q = q, q = q->next) | |
782 | if (! q->in_use && GET_MODE (q->slot) == BLKmode) | |
783 | { | |
784 | if (rtx_equal_p (plus_constant (XEXP (p->slot, 0), p->size), | |
785 | XEXP (q->slot, 0))) | |
786 | { | |
787 | /* Combine q into p. */ | |
788 | p->size += q->size; | |
789 | prev_q->next = q->next; | |
790 | } | |
791 | else if (rtx_equal_p (plus_constant (XEXP (q->slot, 0), q->size), | |
792 | XEXP (p->slot, 0))) | |
793 | { | |
794 | /* Combine p into q. */ | |
795 | q->size += p->size; | |
796 | if (prev_p) | |
797 | prev_p->next = p->next; | |
798 | else | |
799 | temp_slots = p->next; | |
800 | } | |
801 | } | |
802 | } | |
803 | \f | |
804 | /* If X could be a reference to a temporary slot, mark that slot as belonging | |
805 | to the level one higher. If X matched one of our slots, just mark that | |
806 | one. Otherwise, we can't easily predict which it is, so upgrade all of | |
807 | them. Kept slots need not be touched. | |
808 | ||
809 | This is called when an ({...}) construct occurs and a statement | |
810 | returns a value in memory. */ | |
811 | ||
812 | void | |
813 | preserve_temp_slots (x) | |
814 | rtx x; | |
815 | { | |
816 | struct temp_slot *p; | |
817 | ||
818 | /* If X is not in memory or is at a constant address, it cannot be in | |
819 | a temporary slot. */ | |
820 | if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))) | |
821 | return; | |
822 | ||
823 | /* First see if we can find a match. */ | |
824 | for (p = temp_slots; p; p = p->next) | |
825 | if (p->in_use && x == p->slot) | |
826 | { | |
827 | p->level--; | |
828 | return; | |
829 | } | |
830 | ||
831 | /* Otherwise, preserve all non-kept slots at this level. */ | |
832 | for (p = temp_slots; p; p = p->next) | |
833 | if (p->in_use && p->level == temp_slot_level && ! p->keep) | |
834 | p->level--; | |
835 | } | |
836 | ||
837 | /* Free all temporaries used so far. This is normally called at the end | |
838 | of generating code for a statement. */ | |
839 | ||
840 | void | |
841 | free_temp_slots () | |
842 | { | |
843 | struct temp_slot *p; | |
844 | ||
845 | for (p = temp_slots; p; p = p->next) | |
846 | if (p->in_use && p->level == temp_slot_level && ! p->keep) | |
847 | p->in_use = 0; | |
848 | ||
849 | combine_temp_slots (); | |
850 | } | |
851 | ||
852 | /* Push deeper into the nesting level for stack temporaries. */ | |
853 | ||
854 | void | |
855 | push_temp_slots () | |
856 | { | |
857 | /* For GNU C++, we must allow a sequence to be emitted anywhere in | |
858 | the level where the sequence was started. By not changing levels | |
859 | when the compiler is inside a sequence, the temporaries for the | |
860 | sequence will not unwittingly conflict with the temporaries for | |
861 | other sequences and/or code at that level. */ | |
862 | if (in_sequence_p ()) | |
863 | return; | |
864 | ||
865 | temp_slot_level++; | |
866 | } | |
867 | ||
868 | /* Pop a temporary nesting level. All slots in use in the current level | |
869 | are freed. */ | |
870 | ||
871 | void | |
872 | pop_temp_slots () | |
873 | { | |
874 | struct temp_slot *p; | |
875 | ||
876 | /* See comment in push_temp_slots about why we don't change levels | |
877 | in sequences. */ | |
878 | if (in_sequence_p ()) | |
879 | return; | |
880 | ||
881 | for (p = temp_slots; p; p = p->next) | |
882 | if (p->in_use && p->level == temp_slot_level) | |
883 | p->in_use = 0; | |
884 | ||
885 | combine_temp_slots (); | |
886 | ||
887 | temp_slot_level--; | |
888 | } | |
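/* Illustrative pairing (not part of the original source): code that
   must confine temporaries to a subexpression brackets the work with

       push_temp_slots ();
       ... evaluate the subexpression ...
       preserve_temp_slots (result);
       pop_temp_slots ();

   where preserve_temp_slots is needed only if RESULT may live in a
   temporary; it moves that slot up one level so the pop does not
   free it.  */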
889 | \f | |
890 | /* Retroactively move an auto variable from a register to a stack slot. | |
891 | This is done when an address-reference to the variable is seen. */ | |
892 | ||
893 | void | |
894 | put_var_into_stack (decl) | |
895 | tree decl; | |
896 | { | |
897 | register rtx reg; | |
898 | enum machine_mode promoted_mode, decl_mode; | |
899 | struct function *function = 0; | |
900 | tree context = decl_function_context (decl); | |
901 | ||
902 | /* Get the current rtl used for this object and its original mode. */ | |
903 | reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl); | |
904 | ||
905 | /* No need to do anything if decl has no rtx yet | |
906 | since in that case caller is setting TREE_ADDRESSABLE | |
907 | and a stack slot will be assigned when the rtl is made. */ | |
908 | if (reg == 0) | |
909 | return; | |
910 | ||
911 | /* Get the declared mode for this object. */ | |
912 | decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl)) | |
913 | : DECL_MODE (decl)); | |
914 | /* Get the mode it's actually stored in. */ | |
915 | promoted_mode = GET_MODE (reg); | |
916 | ||
917 | /* If this variable comes from an outer function, | |
918 | find that function's saved context. */ | |
919 | if (context != current_function_decl) | |
920 | for (function = outer_function_chain; function; function = function->next) | |
921 | if (function->decl == context) | |
922 | break; | |
923 | ||
924 | /* If this is a variable-size object with a pseudo to address it, | |
925 | put that pseudo into the stack, if the var is nonlocal. */ | |
926 | if (DECL_NONLOCAL (decl) | |
927 | && GET_CODE (reg) == MEM | |
928 | && GET_CODE (XEXP (reg, 0)) == REG | |
929 | && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER) | |
930 | { | |
931 | reg = XEXP (reg, 0); | |
932 | decl_mode = promoted_mode = GET_MODE (reg); | |
933 | } | |
934 | ||
935 | /* Now we should have a value that resides in one or more pseudo regs. */ | |
936 | ||
937 | if (GET_CODE (reg) == REG) | |
938 | put_reg_into_stack (function, reg, TREE_TYPE (decl), | |
939 | promoted_mode, decl_mode); | |
940 | else if (GET_CODE (reg) == CONCAT) | |
941 | { | |
942 | /* A CONCAT contains two pseudos; put them both in the stack. | |
943 | We do it so they end up consecutive. */ | |
944 | enum machine_mode part_mode = GET_MODE (XEXP (reg, 0)); | |
945 | tree part_type = TREE_TYPE (TREE_TYPE (decl)); | |
946 | #ifdef STACK_GROWS_DOWNWARD | |
947 | /* Since part 0 should have a lower address, do it second. */ | |
948 | put_reg_into_stack (function, XEXP (reg, 1), | |
949 | part_type, part_mode, part_mode); | |
950 | put_reg_into_stack (function, XEXP (reg, 0), | |
951 | part_type, part_mode, part_mode); | |
952 | #else | |
953 | put_reg_into_stack (function, XEXP (reg, 0), | |
954 | part_type, part_mode, part_mode); | |
955 | put_reg_into_stack (function, XEXP (reg, 1), | |
956 | part_type, part_mode, part_mode); | |
957 | #endif | |
958 | ||
959 | /* Change the CONCAT into a combined MEM for both parts. */ | |
960 | PUT_CODE (reg, MEM); | |
961 | /* The two parts are in memory order already. | |
962 | Use the lower part's address as ours. */ | |
963 | XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0); | |
964 | /* Prevent sharing of rtl that might lose. */ | |
965 | if (GET_CODE (XEXP (reg, 0)) == PLUS) | |
966 | XEXP (reg, 0) = copy_rtx (XEXP (reg, 0)); | |
967 | } | |
968 | } | |
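/* Usage sketch (illustrative only): when a front end discovers that the
   address of a variable is needed after DECL_RTL has already been made,
   it calls

       put_var_into_stack (decl);

   whereas, if no rtl exists yet, it simply sets TREE_ADDRESSABLE (decl)
   and a stack slot is assigned when the rtl is first made.  */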
969 | ||
970 | /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG | |
971 | into the stack frame of FUNCTION (0 means the current function). | |
972 | DECL_MODE is the machine mode of the user-level data type. | |
973 | PROMOTED_MODE is the machine mode of the register. */ | |
974 | ||
975 | static void | |
976 | put_reg_into_stack (function, reg, type, promoted_mode, decl_mode) | |
977 | struct function *function; | |
978 | rtx reg; | |
979 | tree type; | |
980 | enum machine_mode promoted_mode, decl_mode; | |
981 | { | |
982 | rtx new = 0; | |
983 | ||
984 | if (function) | |
985 | { | |
986 | if (REGNO (reg) < function->max_parm_reg) | |
987 | new = function->parm_reg_stack_loc[REGNO (reg)]; | |
988 | if (new == 0) | |
989 | new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), | |
990 | 0, function); | |
991 | } | |
992 | else | |
993 | { | |
994 | if (REGNO (reg) < max_parm_reg) | |
995 | new = parm_reg_stack_loc[REGNO (reg)]; | |
996 | if (new == 0) | |
997 | new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0); | |
998 | } | |
999 | ||
1000 | XEXP (reg, 0) = XEXP (new, 0); | |
1001 | /* `volatil' bit means one thing for MEMs, another entirely for REGs. */ | |
1002 | REG_USERVAR_P (reg) = 0; | |
1003 | PUT_CODE (reg, MEM); | |
1004 | PUT_MODE (reg, decl_mode); | |
1005 | ||
1006 | /* If this is a memory ref that contains aggregate components, | |
1007 | mark it as such for cse and loop optimize. */ | |
1008 | MEM_IN_STRUCT_P (reg) | |
1009 | = (TREE_CODE (type) == ARRAY_TYPE | |
1010 | || TREE_CODE (type) == RECORD_TYPE | |
1011 | || TREE_CODE (type) == UNION_TYPE | |
1012 | || TREE_CODE (type) == QUAL_UNION_TYPE); | |
1013 | ||
1014 | /* Now make sure that all refs to the variable, previously made | |
1015 | when it was a register, are fixed up to be valid again. */ | |
1016 | if (function) | |
1017 | { | |
1018 | struct var_refs_queue *temp; | |
1019 | ||
1020 | /* Variable is inherited; fix it up when we get back to its function. */ | |
1021 | push_obstacks (function->function_obstack, | |
1022 | function->function_maybepermanent_obstack); | |
1023 | temp | |
1024 | = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue)); | |
1025 | temp->modified = reg; | |
1026 | temp->promoted_mode = promoted_mode; | |
1027 | temp->unsignedp = TREE_UNSIGNED (type); | |
1028 | temp->next = function->fixup_var_refs_queue; | |
1029 | function->fixup_var_refs_queue = temp; | |
1030 | pop_obstacks (); | |
1031 | } | |
1032 | else | |
1033 | /* Variable is local; fix it up now. */ | |
1034 | fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type)); | |
1035 | } | |
1036 | \f | |
1037 | static void | |
1038 | fixup_var_refs (var, promoted_mode, unsignedp) | |
1039 | rtx var; | |
1040 | enum machine_mode promoted_mode; | |
1041 | int unsignedp; | |
1042 | { | |
1043 | tree pending; | |
1044 | rtx first_insn = get_insns (); | |
1045 | struct sequence_stack *stack = sequence_stack; | |
1046 | tree rtl_exps = rtl_expr_chain; | |
1047 | ||
1048 | /* Must scan all insns for stack-refs that exceed the limit. */ | |
1049 | fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0); | |
1050 | ||
1051 | /* Scan all pending sequences too. */ | |
1052 | for (; stack; stack = stack->next) | |
1053 | { | |
1054 | push_to_sequence (stack->first); | |
1055 | fixup_var_refs_insns (var, promoted_mode, unsignedp, | |
1056 | stack->first, stack->next != 0); | |
1057 | /* Update remembered end of sequence | |
1058 | in case we added an insn at the end. */ | |
1059 | stack->last = get_last_insn (); | |
1060 | end_sequence (); | |
1061 | } | |
1062 | ||
1063 | /* Scan all waiting RTL_EXPRs too. */ | |
1064 | for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending)) | |
1065 | { | |
1066 | rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending)); | |
1067 | if (seq != const0_rtx && seq != 0) | |
1068 | { | |
1069 | push_to_sequence (seq); | |
1070 | fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0); | |
1071 | end_sequence (); | |
1072 | } | |
1073 | } | |
1074 | } | |
1075 | \f | |
1076 | /* This structure is used by the following two functions to record MEMs or | |
1077 | pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing | |
1078 | VAR as an address. We need to maintain this list in case two operands of | |
1079 | an insn were required to match; in that case we must ensure we use the | |
1080 | same replacement. */ | |
1081 | ||
1082 | struct fixup_replacement | |
1083 | { | |
1084 | rtx old; | |
1085 | rtx new; | |
1086 | struct fixup_replacement *next; | |
1087 | }; | |
1088 | ||
1089 | /* REPLACEMENTS is a pointer to a list of the above structures and X is | |
1090 | some part of an insn. Return a struct fixup_replacement whose OLD | |
1091 | value is equal to X. Allocate a new structure if no such entry exists. */ | |
1092 | ||
1093 | static struct fixup_replacement * | |
1094 | find_fixup_replacement (replacements, x) | |
1095 | struct fixup_replacement **replacements; | |
1096 | rtx x; | |
1097 | { | |
1098 | struct fixup_replacement *p; | |
1099 | ||
1100 | /* See if we have already replaced this. */ | |
1101 | for (p = *replacements; p && p->old != x; p = p->next) | |
1102 | ; | |
1103 | ||
1104 | if (p == 0) | |
1105 | { | |
1106 | p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement)); | |
1107 | p->old = x; | |
1108 | p->new = 0; | |
1109 | p->next = *replacements; | |
1110 | *replacements = p; | |
1111 | } | |
1112 | ||
1113 | return p; | |
1114 | } | |
1115 | ||
1116 | /* Scan the insn-chain starting with INSN for refs to VAR | |
1117 | and fix them up. TOPLEVEL is nonzero if this chain is the | |
1118 | main chain of insns for the current function. */ | |
1119 | ||
1120 | static void | |
1121 | fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel) | |
1122 | rtx var; | |
1123 | enum machine_mode promoted_mode; | |
1124 | int unsignedp; | |
1125 | rtx insn; | |
1126 | int toplevel; | |
1127 | { | |
1128 | rtx call_dest = 0; | |
1129 | ||
1130 | while (insn) | |
1131 | { | |
1132 | rtx next = NEXT_INSN (insn); | |
1133 | rtx note; | |
1134 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
1135 | { | |
1136 | /* The insn to load VAR from a home in the arglist | |
1137 | is now a no-op. When we see it, just delete it. */ | |
1138 | if (toplevel | |
1139 | && GET_CODE (PATTERN (insn)) == SET | |
1140 | && SET_DEST (PATTERN (insn)) == var | |
1141 | /* If this represents the result of an insn group, | |
1142 | don't delete the insn. */ | |
1143 | && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0 | |
1144 | && rtx_equal_p (SET_SRC (PATTERN (insn)), var)) | |
1145 | { | |
1146 | /* In unoptimized compilation, we shouldn't call delete_insn | |
1147 | except in jump.c doing warnings. */ | |
1148 | PUT_CODE (insn, NOTE); | |
1149 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1150 | NOTE_SOURCE_FILE (insn) = 0; | |
1151 | if (insn == last_parm_insn) | |
1152 | last_parm_insn = PREV_INSN (next); | |
1153 | } | |
1154 | else | |
1155 | { | |
1156 | struct fixup_replacement *replacements = 0; | |
1157 | rtx next_insn = NEXT_INSN (insn); | |
1158 | ||
1159 | #ifdef SMALL_REGISTER_CLASSES | |
1160 | /* If the insn that copies the results of a CALL_INSN | |
1161 | into a pseudo now references VAR, we have to use an | |
1162 | intermediate pseudo since we want the life of the | |
1163 | return value register to be only a single insn. | |
1164 | ||
1165 | If we don't use an intermediate pseudo, such things as | |
1166 | address computations to make the address of VAR valid | |
1167 | if it is not can be placed between the CALL_INSN and INSN. | |
1168 | ||
1169 | To make sure this doesn't happen, we record the destination | |
1170 | of the CALL_INSN and see if the next insn uses both that | |
1171 | and VAR. */ | |
1172 | ||
1173 | if (call_dest != 0 && GET_CODE (insn) == INSN | |
1174 | && reg_mentioned_p (var, PATTERN (insn)) | |
1175 | && reg_mentioned_p (call_dest, PATTERN (insn))) | |
1176 | { | |
1177 | rtx temp = gen_reg_rtx (GET_MODE (call_dest)); | |
1178 | ||
1179 | emit_insn_before (gen_move_insn (temp, call_dest), insn); | |
1180 | ||
1181 | PATTERN (insn) = replace_rtx (PATTERN (insn), | |
1182 | call_dest, temp); | |
1183 | } | |
1184 | ||
1185 | if (GET_CODE (insn) == CALL_INSN | |
1186 | && GET_CODE (PATTERN (insn)) == SET) | |
1187 | call_dest = SET_DEST (PATTERN (insn)); | |
1188 | else if (GET_CODE (insn) == CALL_INSN | |
1189 | && GET_CODE (PATTERN (insn)) == PARALLEL | |
1190 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
1191 | call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); | |
1192 | else | |
1193 | call_dest = 0; | |
1194 | #endif | |
1195 | ||
1196 | /* See if we have to do anything to INSN now that VAR is in | |
1197 | memory. If it needs to be loaded into a pseudo, use a single | |
1198 | pseudo for the entire insn in case there is a MATCH_DUP | |
1199 | between two operands. We pass a pointer to the head of | |
1200 | a list of struct fixup_replacements. If fixup_var_refs_1 | |
1201 | needs to allocate pseudos or replacement MEMs (for SUBREGs), | |
1202 | it will record them in this list. | |
1203 | ||
1204 | If it allocated a pseudo for any replacement, we copy into | |
1205 | it here. */ | |
1206 | ||
1207 | fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn, | |
1208 | &replacements); | |
1209 | ||
1210 | /* If this is last_parm_insn, and any instructions were output | |
1211 | after it to fix it up, then we must set last_parm_insn to | |
1212 | the last such instruction emitted. */ | |
1213 | if (insn == last_parm_insn) | |
1214 | last_parm_insn = PREV_INSN (next_insn); | |
1215 | ||
1216 | while (replacements) | |
1217 | { | |
1218 | if (GET_CODE (replacements->new) == REG) | |
1219 | { | |
1220 | rtx insert_before; | |
1221 | rtx seq; | |
1222 | ||
1223 | /* OLD might be a (subreg (mem)). */ | |
1224 | if (GET_CODE (replacements->old) == SUBREG) | |
1225 | replacements->old | |
1226 | = fixup_memory_subreg (replacements->old, insn, 0); | |
1227 | else | |
1228 | replacements->old | |
1229 | = fixup_stack_1 (replacements->old, insn); | |
1230 | ||
1231 | /* We can not separate USE insns from the CALL_INSN | |
1232 | that they belong to. If this is a CALL_INSN, insert | |
1233 | the move insn before the USE insns preceding it | |
1234 | instead of immediately before the insn. */ | |
1235 | if (GET_CODE (insn) == CALL_INSN) | |
1236 | { | |
1237 | insert_before = insn; | |
1238 | while (GET_CODE (PREV_INSN (insert_before)) == INSN | |
1239 | && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE) | |
1240 | insert_before = PREV_INSN (insert_before); | |
1241 | } | |
1242 | else | |
1243 | insert_before = insn; | |
1244 | ||
1245 | /* If we are changing the mode, do a conversion. | |
1246 | This might be wasteful, but combine.c will | |
1247 | eliminate much of the waste. */ | |
1248 | ||
1249 | if (GET_MODE (replacements->new) | |
1250 | != GET_MODE (replacements->old)) | |
1251 | { | |
1252 | start_sequence (); | |
1253 | convert_move (replacements->new, | |
1254 | replacements->old, unsignedp); | |
1255 | seq = gen_sequence (); | |
1256 | end_sequence (); | |
1257 | } | |
1258 | else | |
1259 | seq = gen_move_insn (replacements->new, | |
1260 | replacements->old); | |
1261 | ||
1262 | emit_insn_before (seq, insert_before); | |
1263 | } | |
1264 | ||
1265 | replacements = replacements->next; | |
1266 | } | |
1267 | } | |
1268 | ||
1269 | /* Also fix up any invalid exprs in the REG_NOTES of this insn. | |
1270 | But don't touch other insns referred to by reg-notes; | |
1271 | we will get them elsewhere. */ | |
1272 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
1273 | if (GET_CODE (note) != INSN_LIST) | |
1274 | XEXP (note, 0) | |
1275 | = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1); | |
1276 | } | |
1277 | insn = next; | |
1278 | } | |
1279 | } | |
1280 | \f | |
1281 | /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE. | |
1282 | See if the rtx expression at *LOC in INSN needs to be changed. | |
1283 | ||
1284 | REPLACEMENTS is a pointer to a list head that starts out zero, but may | |
1285 | contain a list of original rtx's and replacements. If we find that we need | |
1286 | to modify this insn by replacing a memory reference with a pseudo or by | |
1287 | making a new MEM to implement a SUBREG, we consult that list to see if | |
1288 | we have already chosen a replacement. If none has already been allocated, | |
1289 | we allocate it and update the list. fixup_var_refs_insns will copy VAR | |
1290 | or the SUBREG, as appropriate, to the pseudo. */ | |
1291 | ||
1292 | static void | |
1293 | fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements) | |
1294 | register rtx var; | |
1295 | enum machine_mode promoted_mode; | |
1296 | register rtx *loc; | |
1297 | rtx insn; | |
1298 | struct fixup_replacement **replacements; | |
1299 | { | |
1300 | register int i; | |
1301 | register rtx x = *loc; | |
1302 | RTX_CODE code = GET_CODE (x); | |
1303 | register char *fmt; | |
1304 | register rtx tem, tem1; | |
1305 | struct fixup_replacement *replacement; | |
1306 | ||
1307 | switch (code) | |
1308 | { | |
1309 | case MEM: | |
1310 | if (var == x) | |
1311 | { | |
1312 | /* If we already have a replacement, use it. Otherwise, | |
1313 | try to fix up this address in case it is invalid. */ | |
1314 | ||
1315 | replacement = find_fixup_replacement (replacements, var); | |
1316 | if (replacement->new) | |
1317 | { | |
1318 | *loc = replacement->new; | |
1319 | return; | |
1320 | } | |
1321 | ||
1322 | *loc = replacement->new = x = fixup_stack_1 (x, insn); | |
1323 | ||
1324 | /* Unless we are forcing memory to register or we changed the mode, | |
1325 | we can leave things the way they are if the insn is valid. */ | |
1326 | ||
1327 | INSN_CODE (insn) = -1; | |
1328 | if (! flag_force_mem && GET_MODE (x) == promoted_mode | |
1329 | && recog_memoized (insn) >= 0) | |
1330 | return; | |
1331 | ||
1332 | *loc = replacement->new = gen_reg_rtx (promoted_mode); | |
1333 | return; | |
1334 | } | |
1335 | ||
1336 | /* If X contains VAR, we need to unshare it here so that we update | |
1337 | each occurrence separately. But all identical MEMs in one insn | |
1338 | must be replaced with the same rtx because of the possibility of | |
1339 | MATCH_DUPs. */ | |
1340 | ||
1341 | if (reg_mentioned_p (var, x)) | |
1342 | { | |
1343 | replacement = find_fixup_replacement (replacements, x); | |
1344 | if (replacement->new == 0) | |
1345 | replacement->new = copy_most_rtx (x, var); | |
1346 | ||
1347 | *loc = x = replacement->new; | |
1348 | } | |
1349 | break; | |
1350 | ||
1351 | case REG: | |
1352 | case CC0: | |
1353 | case PC: | |
1354 | case CONST_INT: | |
1355 | case CONST: | |
1356 | case SYMBOL_REF: | |
1357 | case LABEL_REF: | |
1358 | case CONST_DOUBLE: | |
1359 | return; | |
1360 | ||
1361 | case SIGN_EXTRACT: | |
1362 | case ZERO_EXTRACT: | |
1363 | /* Note that in some cases those types of expressions are altered | |
1364 | by optimize_bit_field, and do not survive to get here. */ | |
1365 | if (XEXP (x, 0) == var | |
1366 | || (GET_CODE (XEXP (x, 0)) == SUBREG | |
1367 | && SUBREG_REG (XEXP (x, 0)) == var)) | |
1368 | { | |
1369 | /* Get TEM as a valid MEM in the mode presently in the insn. | |
1370 | ||
1371 | We don't worry about the possibility of MATCH_DUP here; it | |
1372 | is highly unlikely and would be tricky to handle. */ | |
1373 | ||
1374 | tem = XEXP (x, 0); | |
1375 | if (GET_CODE (tem) == SUBREG) | |
1376 | tem = fixup_memory_subreg (tem, insn, 1); | |
1377 | tem = fixup_stack_1 (tem, insn); | |
1378 | ||
1379 | /* Unless we want to load from memory, get TEM into the proper mode | |
1380 | for an extract from memory. This can only be done if the | |
1381 | extract is at a constant position and length. */ | |
1382 | ||
1383 | if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT | |
1384 | && GET_CODE (XEXP (x, 2)) == CONST_INT | |
1385 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
1386 | && ! MEM_VOLATILE_P (tem)) | |
1387 | { | |
1388 | enum machine_mode wanted_mode = VOIDmode; | |
1389 | enum machine_mode is_mode = GET_MODE (tem); | |
1390 | int width = INTVAL (XEXP (x, 1)); | |
1391 | int pos = INTVAL (XEXP (x, 2)); | |
1392 | ||
1393 | #ifdef HAVE_extzv | |
1394 | if (GET_CODE (x) == ZERO_EXTRACT) | |
1395 | wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1]; | |
1396 | #endif | |
1397 | #ifdef HAVE_extv | |
1398 | if (GET_CODE (x) == SIGN_EXTRACT) | |
1399 | wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1]; | |
1400 | #endif | |
1401 | /* If we have a narrower mode, we can do something. */ | |
1402 | if (wanted_mode != VOIDmode | |
1403 | && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
1404 | { | |
1405 | int offset = pos / BITS_PER_UNIT; | |
1406 | rtx old_pos = XEXP (x, 2); | |
1407 | rtx newmem; | |
1408 | ||
1409 | /* If the bytes and bits are counted differently, we | |
1410 | must adjust the offset. */ | |
1411 | #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN | |
1412 | offset = (GET_MODE_SIZE (is_mode) | |
1413 | - GET_MODE_SIZE (wanted_mode) - offset); | |
1414 | #endif | |
1415 | ||
1416 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
1417 | ||
1418 | newmem = gen_rtx (MEM, wanted_mode, | |
1419 | plus_constant (XEXP (tem, 0), offset)); | |
1420 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); | |
1421 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); | |
1422 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); | |
1423 | ||
1424 | /* Make the change and see if the insn remains valid. */ | |
1425 | INSN_CODE (insn) = -1; | |
1426 | XEXP (x, 0) = newmem; | |
1427 | XEXP (x, 2) = GEN_INT (pos); | |
1428 | ||
1429 | if (recog_memoized (insn) >= 0) | |
1430 | return; | |
1431 | ||
1432 | /* Otherwise, restore old position. XEXP (x, 0) will be | |
1433 | restored later. */ | |
1434 | XEXP (x, 2) = old_pos; | |
1435 | } | |
1436 | } | |
1437 | ||
1438 | /* If we get here, the bitfield extract insn can't accept a memory | |
1439 | reference. Copy the input into a register. */ | |
1440 | ||
1441 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
1442 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
1443 | XEXP (x, 0) = tem1; | |
1444 | return; | |
1445 | } | |
1446 | break; | |
1447 | ||
1448 | case SUBREG: | |
1449 | if (SUBREG_REG (x) == var) | |
1450 | { | |
1451 | /* If this is a special SUBREG made because VAR was promoted | |
1452 | from a wider mode, replace it with VAR and call ourself | |
1453 | recursively, this time saying that the object previously | |
1454 | had its current mode (by virtue of the SUBREG). */ | |
1455 | ||
1456 | if (SUBREG_PROMOTED_VAR_P (x)) | |
1457 | { | |
1458 | *loc = var; | |
1459 | fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements); | |
1460 | return; | |
1461 | } | |
1462 | ||
1463 | /* If this SUBREG makes VAR wider, it has become a paradoxical | |
1464 | SUBREG with VAR in memory, but these aren't allowed at this | |
1465 | stage of the compilation. So load VAR into a pseudo and take | |
1466 | a SUBREG of that pseudo. */ | |
1467 | if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var))) | |
1468 | { | |
1469 | replacement = find_fixup_replacement (replacements, var); | |
1470 | if (replacement->new == 0) | |
1471 | replacement->new = gen_reg_rtx (GET_MODE (var)); | |
1472 | SUBREG_REG (x) = replacement->new; | |
1473 | return; | |
1474 | } | |
1475 | ||
1476 | /* See if we have already found a replacement for this SUBREG. | |
1477 | If so, use it. Otherwise, make a MEM and see if the insn | |
1478 | is recognized. If not, or if we should force MEM into a register, | |
1479 | make a pseudo for this SUBREG. */ | |
1480 | replacement = find_fixup_replacement (replacements, x); | |
1481 | if (replacement->new) | |
1482 | { | |
1483 | *loc = replacement->new; | |
1484 | return; | |
1485 | } | |
1486 | ||
1487 | replacement->new = *loc = fixup_memory_subreg (x, insn, 0); | |
1488 | ||
1489 | INSN_CODE (insn) = -1; | |
1490 | if (! flag_force_mem && recog_memoized (insn) >= 0) | |
1491 | return; | |
1492 | ||
1493 | *loc = replacement->new = gen_reg_rtx (GET_MODE (x)); | |
1494 | return; | |
1495 | } | |
1496 | break; | |
1497 | ||
1498 | case SET: | |
1499 | /* First do special simplification of bit-field references. */ | |
1500 | if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT | |
1501 | || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT) | |
1502 | optimize_bit_field (x, insn, 0); | |
1503 | if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT | |
1504 | || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT) | |
1505 | optimize_bit_field (x, insn, NULL_PTR); | |
1506 | ||
1507 | /* If SET_DEST is now a paradoxical SUBREG, put the result of this | |
1508 | insn into a pseudo and store the low part of the pseudo into VAR. */ | |
1509 | if (GET_CODE (SET_DEST (x)) == SUBREG | |
1510 | && SUBREG_REG (SET_DEST (x)) == var | |
1511 | && (GET_MODE_SIZE (GET_MODE (SET_DEST (x))) | |
1512 | > GET_MODE_SIZE (GET_MODE (var)))) | |
1513 | { | |
1514 | SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x))); | |
1515 | emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var), | |
1516 | tem)), | |
1517 | insn); | |
1518 | break; | |
1519 | } | |
1520 | ||
1521 | { | |
1522 | rtx dest = SET_DEST (x); | |
1523 | rtx src = SET_SRC (x); | |
1524 | rtx outerdest = dest; | |
1525 | ||
1526 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART | |
1527 | || GET_CODE (dest) == SIGN_EXTRACT | |
1528 | || GET_CODE (dest) == ZERO_EXTRACT) | |
1529 | dest = XEXP (dest, 0); | |
1530 | ||
1531 | if (GET_CODE (src) == SUBREG) | |
1532 | src = XEXP (src, 0); | |
1533 | ||
1534 | /* If VAR does not appear at the top level of the SET | |
1535 | just scan the lower levels of the tree. */ | |
1536 | ||
1537 | if (src != var && dest != var) | |
1538 | break; | |
1539 | ||
1540 | /* We will need to rerecognize this insn. */ | |
1541 | INSN_CODE (insn) = -1; | |
1542 | ||
1543 | #ifdef HAVE_insv | |
1544 | if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var) | |
1545 | { | |
1546 | /* Since this case will return, ensure we fixup all the | |
1547 | operands here. */ | |
1548 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1), | |
1549 | insn, replacements); | |
1550 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2), | |
1551 | insn, replacements); | |
1552 | fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x), | |
1553 | insn, replacements); | |
1554 | ||
1555 | tem = XEXP (outerdest, 0); | |
1556 | ||
1557 | /* Clean up (SUBREG:SI (MEM:mode ...) 0) | |
1558 | that may appear inside a ZERO_EXTRACT. | |
1559 | This was legitimate when the MEM was a REG. */ | |
1560 | if (GET_CODE (tem) == SUBREG | |
1561 | && SUBREG_REG (tem) == var) | |
1562 | tem = fixup_memory_subreg (tem, insn, 1); | |
1563 | else | |
1564 | tem = fixup_stack_1 (tem, insn); | |
1565 | ||
1566 | if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT | |
1567 | && GET_CODE (XEXP (outerdest, 2)) == CONST_INT | |
1568 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
1569 | && ! MEM_VOLATILE_P (tem)) | |
1570 | { | |
1571 | enum machine_mode wanted_mode | |
1572 | = insn_operand_mode[(int) CODE_FOR_insv][0]; | |
1573 | enum machine_mode is_mode = GET_MODE (tem); | |
1574 | int width = INTVAL (XEXP (outerdest, 1)); | |
1575 | int pos = INTVAL (XEXP (outerdest, 2)); | |
1576 | ||
1577 | /* If we have a narrower mode, we can do something. */ | |
1578 | if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
1579 | { | |
1580 | int offset = pos / BITS_PER_UNIT; | |
1581 | rtx old_pos = XEXP (outerdest, 2); | |
1582 | rtx newmem; | |
1583 | ||
1584 | #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN | |
1585 | offset = (GET_MODE_SIZE (is_mode) | |
1586 | - GET_MODE_SIZE (wanted_mode) - offset); | |
1587 | #endif | |
1588 | ||
1589 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
1590 | ||
1591 | newmem = gen_rtx (MEM, wanted_mode, | |
1592 | plus_constant (XEXP (tem, 0), offset)); | |
1593 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); | |
1594 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); | |
1595 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); | |
1596 | ||
1597 | /* Make the change and see if the insn remains valid. */ | |
1598 | INSN_CODE (insn) = -1; | |
1599 | XEXP (outerdest, 0) = newmem; | |
1600 | XEXP (outerdest, 2) = GEN_INT (pos); | |
1601 | ||
1602 | if (recog_memoized (insn) >= 0) | |
1603 | return; | |
1604 | ||
1605 | /* Otherwise, restore old position. XEXP (outerdest, 0) will be | |
1606 | restored later. */ | |
1607 | XEXP (outerdest, 2) = old_pos; | |
1608 | } | |
1609 | } | |
1610 | ||
1611 | /* If we get here, the bit-field store doesn't allow memory | |
1612 | or isn't located at a constant position. Load the value into | |
1613 | a register, do the store, and put it back into memory. */ | |
1614 | ||
1615 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
1616 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
1617 | emit_insn_after (gen_move_insn (tem, tem1), insn); | |
1618 | XEXP (outerdest, 0) = tem1; | |
1619 | return; | |
1620 | } | |
1621 | #endif | |
1622 | ||
1623 | /* STRICT_LOW_PART is a no-op on memory references | |
1624 | and it can cause combinations to be unrecognizable, | |
1625 | so eliminate it. */ | |
1626 | ||
1627 | if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART) | |
1628 | SET_DEST (x) = XEXP (SET_DEST (x), 0); | |
1629 | ||
1630 | /* A valid insn to copy VAR into or out of a register | |
1631 | must be left alone, to avoid an infinite loop here. | |
1632 | If the reference to VAR is by a subreg, fix that up, | |
1633 | since SUBREG is not valid for a memref. | |
1634 | Also fix up the address of the stack slot. | |
1635 | ||
1636 | Note that we must not try to recognize the insn until | |
1637 | after we know that we have valid addresses and no | |
1638 | (subreg (mem ...) ...) constructs, since these interfere | |
1639 | with determining the validity of the insn. */ | |
1640 | ||
1641 | if ((SET_SRC (x) == var | |
1642 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
1643 | && SUBREG_REG (SET_SRC (x)) == var)) | |
1644 | && (GET_CODE (SET_DEST (x)) == REG | |
1645 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
1646 | && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)) | |
1647 | && x == single_set (PATTERN (insn))) | |
1648 | { | |
1649 | rtx pat; | |
1650 | ||
1651 | replacement = find_fixup_replacement (replacements, SET_SRC (x)); | |
1652 | if (replacement->new) | |
1653 | SET_SRC (x) = replacement->new; | |
1654 | else if (GET_CODE (SET_SRC (x)) == SUBREG) | |
1655 | SET_SRC (x) = replacement->new | |
1656 | = fixup_memory_subreg (SET_SRC (x), insn, 0); | |
1657 | else | |
1658 | SET_SRC (x) = replacement->new | |
1659 | = fixup_stack_1 (SET_SRC (x), insn); | |
1660 | ||
1661 | if (recog_memoized (insn) >= 0) | |
1662 | return; | |
1663 | ||
1664 | /* INSN is not valid, but we know that we want to | |
1665 | copy SET_SRC (x) to SET_DEST (x) in some way. So | |
1666 | we generate the move and see whether it requires more | |
1667 | than one insn. If it does, we emit those insns and | |
1668 | delete INSN. Otherwise, we can just replace the pattern | |
1669 | of INSN; we have already verified above that INSN has | |
1670 | no other function than to do X. */ | |
1671 | ||
1672 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
1673 | if (GET_CODE (pat) == SEQUENCE) | |
1674 | { | |
1675 | emit_insn_after (pat, insn); | |
1676 | PUT_CODE (insn, NOTE); | |
1677 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1678 | NOTE_SOURCE_FILE (insn) = 0; | |
1679 | } | |
1680 | else | |
1681 | PATTERN (insn) = pat; | |
1682 | ||
1683 | return; | |
1684 | } | |
1685 | ||
1686 | if ((SET_DEST (x) == var | |
1687 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
1688 | && SUBREG_REG (SET_DEST (x)) == var)) | |
1689 | && (GET_CODE (SET_SRC (x)) == REG | |
1690 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
1691 | && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG)) | |
1692 | && x == single_set (PATTERN (insn))) | |
1693 | { | |
1694 | rtx pat; | |
1695 | ||
1696 | if (GET_CODE (SET_DEST (x)) == SUBREG) | |
1697 | SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0); | |
1698 | else | |
1699 | SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn); | |
1700 | ||
1701 | if (recog_memoized (insn) >= 0) | |
1702 | return; | |
1703 | ||
1704 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
1705 | if (GET_CODE (pat) == SEQUENCE) | |
1706 | { | |
1707 | emit_insn_after (pat, insn); | |
1708 | PUT_CODE (insn, NOTE); | |
1709 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1710 | NOTE_SOURCE_FILE (insn) = 0; | |
1711 | } | |
1712 | else | |
1713 | PATTERN (insn) = pat; | |
1714 | ||
1715 | return; | |
1716 | } | |
1717 | ||
1718 | /* Otherwise, storing into VAR must be handled specially | |
1719 | by storing into a temporary and copying that into VAR | |
1720 | with a new insn after this one. Note that this case | |
1721 | will be used when storing into a promoted scalar since | |
1722 | the insn will now have different modes on the input | |
1723 | and output and hence will be invalid (except for the case | |
1724 | of setting it to a constant, which does not need any | |
1725 | change if it is valid). We generate extra code in that case, | |
1726 | but combine.c will eliminate it. */ | |
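 | /* A sketch of that transformation, with illustrative modes only: if | |
 | VAR now lives in memory as (mem:HI slot) while this insn computes in | |
 | SImode, the destination becomes a fresh (reg:SI temp), and a new insn | |
 | copying the low part of TEMP back into the slot is emitted after | |
 | this one. */ | |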
1727 | ||
1728 | if (dest == var) | |
1729 | { | |
1730 | rtx temp; | |
1731 | rtx fixeddest = SET_DEST (x); | |
1732 | ||
1733 | /* STRICT_LOW_PART can be discarded around a MEM. */ | |
1734 | if (GET_CODE (fixeddest) == STRICT_LOW_PART) | |
1735 | fixeddest = XEXP (fixeddest, 0); | |
1736 | /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */ | |
1737 | if (GET_CODE (fixeddest) == SUBREG) | |
1738 | fixeddest = fixup_memory_subreg (fixeddest, insn, 0); | |
1739 | else | |
1740 | fixeddest = fixup_stack_1 (fixeddest, insn); | |
1741 | ||
1742 | temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode | |
1743 | ? GET_MODE (fixeddest) | |
1744 | : GET_MODE (SET_SRC (x))); | |
1745 | ||
1746 | emit_insn_after (gen_move_insn (fixeddest, | |
1747 | gen_lowpart (GET_MODE (fixeddest), | |
1748 | temp)), | |
1749 | insn); | |
1750 | ||
1751 | SET_DEST (x) = temp; | |
1752 | } | |
1753 | } | |
1754 | } | |
1755 | ||
1756 | /* Nothing special about this RTX; fix its operands. */ | |
1757 | ||
1758 | fmt = GET_RTX_FORMAT (code); | |
1759 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1760 | { | |
1761 | if (fmt[i] == 'e') | |
1762 | fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements); | |
1763 | if (fmt[i] == 'E') | |
1764 | { | |
1765 | register int j; | |
1766 | for (j = 0; j < XVECLEN (x, i); j++) | |
1767 | fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j), | |
1768 | insn, replacements); | |
1769 | } | |
1770 | } | |
1771 | } | |
1772 | \f | |
1773 | /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)), | |
1774 | return an rtx (MEM:m1 newaddr) which is equivalent. | |
1775 | If any insns must be emitted to compute NEWADDR, put them before INSN. | |
1776 | ||
1777 | UNCRITICAL nonzero means accept paradoxical subregs. | |
1778 | This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */ | |
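 | /* An illustrative sketch only, assuming a little-endian target with | |
 | UNITS_PER_WORD == 4: (subreg:SI (mem:DI addr) 1) is turned into the | |
 | equivalent (mem:SI (plus addr (const_int 4))), with any insns needed | |
 | to form the new address emitted before INSN. */ | |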
1779 | ||
1780 | static rtx | |
1781 | fixup_memory_subreg (x, insn, uncritical) | |
1782 | rtx x; | |
1783 | rtx insn; | |
1784 | int uncritical; | |
1785 | { | |
1786 | int offset = SUBREG_WORD (x) * UNITS_PER_WORD; | |
1787 | rtx addr = XEXP (SUBREG_REG (x), 0); | |
1788 | enum machine_mode mode = GET_MODE (x); | |
1789 | rtx saved, result; | |
1790 | ||
1791 | /* Paradoxical SUBREGs are usually invalid during RTL generation. */ | |
1792 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) | |
1793 | && ! uncritical) | |
1794 | abort (); | |
1795 | ||
1796 | #if BYTES_BIG_ENDIAN | |
1797 | offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
1798 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))); | |
1799 | #endif | |
1800 | addr = plus_constant (addr, offset); | |
1801 | if (!flag_force_addr && memory_address_p (mode, addr)) | |
1802 | /* Shortcut if no insns need be emitted. */ | |
1803 | return change_address (SUBREG_REG (x), mode, addr); | |
1804 | start_sequence (); | |
1805 | result = change_address (SUBREG_REG (x), mode, addr); | |
1806 | emit_insn_before (gen_sequence (), insn); | |
1807 | end_sequence (); | |
1808 | return result; | |
1809 | } | |
1810 | ||
1811 | /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X. | |
1812 | Replace subexpressions of X in place. | |
1813 | If X itself is a (SUBREG (MEM ...) ...), return the replacement expression. | |
1814 | Otherwise return X, with its contents possibly altered. | |
1815 | ||
1816 | If any insns must be emitted to compute NEWADDR, put them before INSN. | |
1817 | ||
1818 | UNCRITICAL is as in fixup_memory_subreg. */ | |
1819 | ||
1820 | static rtx | |
1821 | walk_fixup_memory_subreg (x, insn, uncritical) | |
1822 | register rtx x; | |
1823 | rtx insn; | |
1824 | int uncritical; | |
1825 | { | |
1826 | register enum rtx_code code; | |
1827 | register char *fmt; | |
1828 | register int i; | |
1829 | ||
1830 | if (x == 0) | |
1831 | return 0; | |
1832 | ||
1833 | code = GET_CODE (x); | |
1834 | ||
1835 | if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
1836 | return fixup_memory_subreg (x, insn, uncritical); | |
1837 | ||
1838 | /* Nothing special about this RTX; fix its operands. */ | |
1839 | ||
1840 | fmt = GET_RTX_FORMAT (code); | |
1841 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1842 | { | |
1843 | if (fmt[i] == 'e') | |
1844 | XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical); | |
1845 | if (fmt[i] == 'E') | |
1846 | { | |
1847 | register int j; | |
1848 | for (j = 0; j < XVECLEN (x, i); j++) | |
1849 | XVECEXP (x, i, j) | |
1850 | = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical); | |
1851 | } | |
1852 | } | |
1853 | return x; | |
1854 | } | |
1855 | \f | |
1856 | #if 0 | |
1857 | /* Fix up any references to stack slots that are invalid memory addresses | |
1858 | because they exceed the maximum range of a displacement. */ | |
1859 | ||
1860 | void | |
1861 | fixup_stack_slots () | |
1862 | { | |
1863 | register rtx insn; | |
1864 | ||
1865 | /* Did we generate a stack slot that is out of range | |
1866 | or otherwise has an invalid address? */ | |
1867 | if (invalid_stack_slot) | |
1868 | { | |
1869 | /* Yes. Must scan all insns for stack-refs that exceed the limit. */ | |
1870 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
1871 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN | |
1872 | || GET_CODE (insn) == JUMP_INSN) | |
1873 | fixup_stack_1 (PATTERN (insn), insn); | |
1874 | } | |
1875 | } | |
1876 | #endif | |
1877 | ||
1878 | /* For each memory ref within X, if it refers to a stack slot | |
1879 | with an out of range displacement, put the address in a temp register | |
1880 | (emitting new insns before INSN to load these registers) | |
1881 | and alter the memory ref to use that register. | |
1882 | Replace each such MEM rtx with a copy, to avoid clobberage. */ | |
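 | /* A sketch, assuming a machine with a limited displacement range: | |
 | given (mem:SI (plus (reg virtual-stack-vars) (const_int 40000))) | |
 | whose displacement is out of range, the address is copied into a | |
 | fresh pseudo by insns emitted before INSN and the reference becomes | |
 | (mem:SI (reg pseudo)). */ | |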
1883 | ||
1884 | static rtx | |
1885 | fixup_stack_1 (x, insn) | |
1886 | rtx x; | |
1887 | rtx insn; | |
1888 | { | |
1889 | register int i; | |
1890 | register RTX_CODE code = GET_CODE (x); | |
1891 | register char *fmt; | |
1892 | ||
1893 | if (code == MEM) | |
1894 | { | |
1895 | register rtx ad = XEXP (x, 0); | |
1896 | /* If we have address of a stack slot but it's not valid | |
1897 | (displacement is too large), compute the sum in a register. */ | |
1898 | if (GET_CODE (ad) == PLUS | |
1899 | && GET_CODE (XEXP (ad, 0)) == REG | |
1900 | && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER | |
1901 | && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER) | |
1902 | || XEXP (ad, 0) == current_function_internal_arg_pointer) | |
1903 | && GET_CODE (XEXP (ad, 1)) == CONST_INT) | |
1904 | { | |
1905 | rtx temp, seq; | |
1906 | if (memory_address_p (GET_MODE (x), ad)) | |
1907 | return x; | |
1908 | ||
1909 | start_sequence (); | |
1910 | temp = copy_to_reg (ad); | |
1911 | seq = gen_sequence (); | |
1912 | end_sequence (); | |
1913 | emit_insn_before (seq, insn); | |
1914 | return change_address (x, VOIDmode, temp); | |
1915 | } | |
1916 | return x; | |
1917 | } | |
1918 | ||
1919 | fmt = GET_RTX_FORMAT (code); | |
1920 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1921 | { | |
1922 | if (fmt[i] == 'e') | |
1923 | XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn); | |
1924 | if (fmt[i] == 'E') | |
1925 | { | |
1926 | register int j; | |
1927 | for (j = 0; j < XVECLEN (x, i); j++) | |
1928 | XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn); | |
1929 | } | |
1930 | } | |
1931 | return x; | |
1932 | } | |
1933 | \f | |
1934 | /* Optimization: a bit-field instruction whose field | |
1935 | happens to be a byte or halfword in memory | |
1936 | can be changed to a move instruction. | |
1937 | ||
1938 | We call here when INSN is an insn to examine or store into a bit-field. | |
1939 | BODY is the SET-rtx to be altered. | |
1940 | ||
1941 | EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0. | |
1942 | (Currently this is called only from function.c, and EQUIV_MEM | |
1943 | is always 0.) */ | |
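 | /* A sketch of the intended rewrite, assuming byte-addressable memory | |
 | and matching bit/byte endianness: | |
 | (set (reg:SI r) (zero_extract:SI (mem:SI a) (const_int 8) (const_int 8))) | |
 | can become a QImode load from (mem:QI (plus a (const_int 1))), | |
 | zero-extended into a pseudo that then replaces the extraction. */ | |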
1944 | ||
1945 | static void | |
1946 | optimize_bit_field (body, insn, equiv_mem) | |
1947 | rtx body; | |
1948 | rtx insn; | |
1949 | rtx *equiv_mem; | |
1950 | { | |
1951 | register rtx bitfield; | |
1952 | int destflag; | |
1953 | rtx seq = 0; | |
1954 | enum machine_mode mode; | |
1955 | ||
1956 | if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT | |
1957 | || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT) | |
1958 | bitfield = SET_DEST (body), destflag = 1; | |
1959 | else | |
1960 | bitfield = SET_SRC (body), destflag = 0; | |
1961 | ||
1962 | /* First check that the field being stored has constant size and position | |
1963 | and is in fact a byte or halfword suitably aligned. */ | |
1964 | ||
1965 | if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT | |
1966 | && GET_CODE (XEXP (bitfield, 2)) == CONST_INT | |
1967 | && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1)) | |
1968 | != BLKmode) | |
1969 | && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0) | |
1970 | { | |
1971 | register rtx memref = 0; | |
1972 | ||
1973 | /* Now check that the containing word is memory, not a register, | |
1974 | and that it is safe to change the machine mode. */ | |
1975 | ||
1976 | if (GET_CODE (XEXP (bitfield, 0)) == MEM) | |
1977 | memref = XEXP (bitfield, 0); | |
1978 | else if (GET_CODE (XEXP (bitfield, 0)) == REG | |
1979 | && equiv_mem != 0) | |
1980 | memref = equiv_mem[REGNO (XEXP (bitfield, 0))]; | |
1981 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
1982 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM) | |
1983 | memref = SUBREG_REG (XEXP (bitfield, 0)); | |
1984 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
1985 | && equiv_mem != 0 | |
1986 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG) | |
1987 | memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))]; | |
1988 | ||
1989 | if (memref | |
1990 | && ! mode_dependent_address_p (XEXP (memref, 0)) | |
1991 | && ! MEM_VOLATILE_P (memref)) | |
1992 | { | |
1993 | /* Now adjust the address, first for any subreg'ing | |
1994 | that we are now getting rid of, | |
1995 | and then for which byte of the word is wanted. */ | |
1996 | ||
1997 | register int offset = INTVAL (XEXP (bitfield, 2)); | |
1998 | /* Adjust OFFSET to count bits from low-address byte. */ | |
1999 | #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN | |
2000 | offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0))) | |
2001 | - offset - INTVAL (XEXP (bitfield, 1))); | |
2002 | #endif | |
2003 | /* Adjust OFFSET to count bytes from low-address byte. */ | |
2004 | offset /= BITS_PER_UNIT; | |
2005 | if (GET_CODE (XEXP (bitfield, 0)) == SUBREG) | |
2006 | { | |
2007 | offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD; | |
2008 | #if BYTES_BIG_ENDIAN | |
2009 | offset -= (MIN (UNITS_PER_WORD, | |
2010 | GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0)))) | |
2011 | - MIN (UNITS_PER_WORD, | |
2012 | GET_MODE_SIZE (GET_MODE (memref)))); | |
2013 | #endif | |
2014 | } | |
2015 | ||
2016 | memref = change_address (memref, mode, | |
2017 | plus_constant (XEXP (memref, 0), offset)); | |
2018 | ||
2019 | /* Store this memory reference where | |
2020 | we found the bit field reference. */ | |
2021 | ||
2022 | if (destflag) | |
2023 | { | |
2024 | validate_change (insn, &SET_DEST (body), memref, 1); | |
2025 | if (! CONSTANT_ADDRESS_P (SET_SRC (body))) | |
2026 | { | |
2027 | rtx src = SET_SRC (body); | |
2028 | while (GET_CODE (src) == SUBREG | |
2029 | && SUBREG_WORD (src) == 0) | |
2030 | src = SUBREG_REG (src); | |
2031 | if (GET_MODE (src) != GET_MODE (memref)) | |
2032 | src = gen_lowpart (GET_MODE (memref), SET_SRC (body)); | |
2033 | validate_change (insn, &SET_SRC (body), src, 1); | |
2034 | } | |
2035 | else if (GET_MODE (SET_SRC (body)) != VOIDmode | |
2036 | && GET_MODE (SET_SRC (body)) != GET_MODE (memref)) | |
2037 | /* This shouldn't happen because anything that didn't have | |
2038 | one of these modes should have been converted explicitly | |
2039 | and then referenced through a subreg. | |
2040 | This is so because the original bit-field was | |
2041 | handled by agg_mode and so its tree structure had | |
2042 | the same mode that memref now has. */ | |
2043 | abort (); | |
2044 | } | |
2045 | else | |
2046 | { | |
2047 | rtx dest = SET_DEST (body); | |
2048 | ||
2049 | while (GET_CODE (dest) == SUBREG | |
2050 | && SUBREG_WORD (dest) == 0) | |
2051 | dest = SUBREG_REG (dest); | |
2052 | ||
2053 | validate_change (insn, &SET_DEST (body), dest, 1); | |
2054 | ||
2055 | if (GET_MODE (dest) == GET_MODE (memref)) | |
2056 | validate_change (insn, &SET_SRC (body), memref, 1); | |
2057 | else | |
2058 | { | |
2059 | /* Convert the mem ref to the destination mode. */ | |
2060 | rtx newreg = gen_reg_rtx (GET_MODE (dest)); | |
2061 | ||
2062 | start_sequence (); | |
2063 | convert_move (newreg, memref, | |
2064 | GET_CODE (SET_SRC (body)) == ZERO_EXTRACT); | |
2065 | seq = get_insns (); | |
2066 | end_sequence (); | |
2067 | ||
2068 | validate_change (insn, &SET_SRC (body), newreg, 1); | |
2069 | } | |
2070 | } | |
2071 | ||
2072 | /* See if we can convert this extraction or insertion into | |
2073 | a simple move insn. We might not be able to do so if this | |
2074 | was, for example, part of a PARALLEL. | |
2075 | ||
2076 | If we succeed, write out any needed conversions. If we fail, | |
2077 | it is hard to guess why we failed, so don't do anything | |
2078 | special; just let the optimization be suppressed. */ | |
2079 | ||
2080 | if (apply_change_group () && seq) | |
2081 | emit_insns_before (seq, insn); | |
2082 | } | |
2083 | } | |
2084 | } | |
2085 | \f | |
2086 | /* These routines are responsible for converting virtual register references | |
2087 | to the actual hard register references once RTL generation is complete. | |
2088 | ||
2089 | The following four variables are used for communication between the | |
2090 | routines. They contain the offsets of the virtual registers from their | |
2091 | respective hard registers. */ | |
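 | /* In sketch form, the substitutions made below are: | |
 | (reg virtual-incoming-args) -> (reg ap) plus in_arg_offset, | |
 | (reg virtual-stack-vars) -> (reg fp) plus var_offset, | |
 | (reg virtual-stack-dynamic) -> (reg sp) plus dynamic_offset, | |
 | (reg virtual-outgoing-args) -> (reg sp) plus out_arg_offset. */ | |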
2092 | ||
2093 | static int in_arg_offset; | |
2094 | static int var_offset; | |
2095 | static int dynamic_offset; | |
2096 | static int out_arg_offset; | |
2097 | ||
2098 | /* In most machines, the stack pointer register is equivalent to the bottom | |
2099 | of the stack. */ | |
2100 | ||
2101 | #ifndef STACK_POINTER_OFFSET | |
2102 | #define STACK_POINTER_OFFSET 0 | |
2103 | #endif | |
2104 | ||
2105 | /* If not defined, pick an appropriate default for the offset of dynamically | |
2106 | allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, | |
2107 | REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ | |
2108 | ||
2109 | #ifndef STACK_DYNAMIC_OFFSET | |
2110 | ||
2111 | #ifdef ACCUMULATE_OUTGOING_ARGS | |
2112 | /* The bottom of the stack points to the actual arguments. If | |
2113 | REG_PARM_STACK_SPACE is defined, this includes the space for the register | |
2114 | parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, | |
2115 | stack space for register parameters is not pushed by the caller, but | |
2116 | rather part of the fixed stack areas and hence not included in | |
2117 | `current_function_outgoing_args_size'. Nevertheless, we must allow | |
2118 | for it when allocating stack dynamic objects. */ | |
2119 | ||
2120 | #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) | |
2121 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
2122 | (current_function_outgoing_args_size \ | |
2123 | + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET)) | |
2124 | ||
2125 | #else | |
2126 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
2127 | (current_function_outgoing_args_size + (STACK_POINTER_OFFSET)) | |
2128 | #endif | |
2129 | ||
2130 | #else | |
2131 | #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET | |
2132 | #endif | |
2133 | #endif | |
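 | /* An illustrative sketch only: with ACCUMULATE_OUTGOING_ARGS, 32 bytes | |
 | of outgoing arguments and a 16-byte REG_PARM_STACK_SPACE, dynamically | |
 | allocated objects would start 48 bytes (plus STACK_POINTER_OFFSET) | |
 | above the stack pointer. */ | |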
2134 | ||
2135 | /* Pass through the INSNS of function FNDECL and convert virtual register | |
2136 | references to hard register references. */ | |
2137 | ||
2138 | void | |
2139 | instantiate_virtual_regs (fndecl, insns) | |
2140 | tree fndecl; | |
2141 | rtx insns; | |
2142 | { | |
2143 | rtx insn; | |
2144 | ||
2145 | /* Compute the offsets to use for this function. */ | |
2146 | in_arg_offset = FIRST_PARM_OFFSET (fndecl); | |
2147 | var_offset = STARTING_FRAME_OFFSET; | |
2148 | dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl); | |
2149 | out_arg_offset = STACK_POINTER_OFFSET; | |
2150 | ||
2151 | /* Scan all variables and parameters of this function. For each that is | |
2152 | in memory, instantiate all virtual registers if the result is a valid | |
2153 | address. If not, we do it later. That will handle most uses of virtual | |
2154 | regs on many machines. */ | |
2155 | instantiate_decls (fndecl, 1); | |
2156 | ||
2157 | /* Initialize recognition, indicating that volatile is OK. */ | |
2158 | init_recog (); | |
2159 | ||
2160 | /* Scan through all the insns, instantiating every virtual register still | |
2161 | present. */ | |
2162 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
2163 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN | |
2164 | || GET_CODE (insn) == CALL_INSN) | |
2165 | { | |
2166 | instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1); | |
2167 | instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0); | |
2168 | } | |
2169 | ||
2170 | /* Now instantiate the remaining register equivalences for debugging info. | |
2171 | These will not be valid addresses. */ | |
2172 | instantiate_decls (fndecl, 0); | |
2173 | ||
2174 | /* Indicate that, from now on, assign_stack_local should use | |
2175 | frame_pointer_rtx. */ | |
2176 | virtuals_instantiated = 1; | |
2177 | } | |
2178 | ||
2179 | /* Scan all decls in FNDECL (both variables and parameters) and instantiate | |
2180 | all virtual registers in their DECL_RTL's. | |
2181 | ||
2182 | If VALID_ONLY, do this only if the resulting address is still valid. | |
2183 | Otherwise, always do it. */ | |
2184 | ||
2185 | static void | |
2186 | instantiate_decls (fndecl, valid_only) | |
2187 | tree fndecl; | |
2188 | int valid_only; | |
2189 | { | |
2190 | tree decl; | |
2191 | ||
2192 | if (DECL_INLINE (fndecl)) | |
2193 | /* When compiling an inline function, the obstack used for | |
2194 | rtl allocation is the maybepermanent_obstack. Calling | |
2195 | `resume_temporary_allocation' switches us back to that | |
2196 | obstack while we process this function's parameters. */ | |
2197 | resume_temporary_allocation (); | |
2198 | ||
2199 | /* Process all parameters of the function. */ | |
2200 | for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) | |
2201 | { | |
2202 | instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)), | |
2203 | valid_only); | |
2204 | instantiate_decl (DECL_INCOMING_RTL (decl), | |
2205 | int_size_in_bytes (TREE_TYPE (decl)), valid_only); | |
2206 | } | |
2207 | ||
2208 | /* Now process all variables defined in the function or its subblocks. */ | |
2209 | instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only); | |
2210 | ||
2211 | if (DECL_INLINE (fndecl)) | |
2212 | { | |
2213 | /* Save all rtl allocated for this function by raising the | |
2214 | high-water mark on the maybepermanent_obstack. */ | |
2215 | preserve_data (); | |
2216 | /* All further rtl allocation is now done in the current_obstack. */ | |
2217 | rtl_in_current_obstack (); | |
2218 | } | |
2219 | } | |
2220 | ||
2221 | /* Subroutine of instantiate_decls: Process all decls in the given | |
2222 | BLOCK node and all its subblocks. */ | |
2223 | ||
2224 | static void | |
2225 | instantiate_decls_1 (let, valid_only) | |
2226 | tree let; | |
2227 | int valid_only; | |
2228 | { | |
2229 | tree t; | |
2230 | ||
2231 | for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) | |
2232 | instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)), | |
2233 | valid_only); | |
2234 | ||
2235 | /* Process all subblocks. */ | |
2236 | for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) | |
2237 | instantiate_decls_1 (t, valid_only); | |
2238 | } | |
2239 | ||
2240 | /* Subroutine of the preceding procedures: Given RTL representing a | |
2241 | decl and the size of the object, do any instantiation required. | |
2242 | ||
2243 | If VALID_ONLY is non-zero, it means that the RTL should only be | |
2244 | changed if the new address is valid. */ | |
2245 | ||
2246 | static void | |
2247 | instantiate_decl (x, size, valid_only) | |
2248 | rtx x; | |
2249 | int size; | |
2250 | int valid_only; | |
2251 | { | |
2252 | enum machine_mode mode; | |
2253 | rtx addr; | |
2254 | ||
2255 | /* If this is not a MEM, no need to do anything. Similarly if the | |
2256 | address is a constant or a register that is not a virtual register. */ | |
2257 | ||
2258 | if (x == 0 || GET_CODE (x) != MEM) | |
2259 | return; | |
2260 | ||
2261 | addr = XEXP (x, 0); | |
2262 | if (CONSTANT_P (addr) | |
2263 | || (GET_CODE (addr) == REG | |
2264 | && (REGNO (addr) < FIRST_VIRTUAL_REGISTER | |
2265 | || REGNO (addr) > LAST_VIRTUAL_REGISTER))) | |
2266 | return; | |
2267 | ||
2268 | /* If we should only do this if the address is valid, copy the address. | |
2269 | We need to do this so we can undo any changes that might make the | |
2270 | address invalid. This copy is unfortunate, but probably can't be | |
2271 | avoided. */ | |
2272 | ||
2273 | if (valid_only) | |
2274 | addr = copy_rtx (addr); | |
2275 | ||
2276 | instantiate_virtual_regs_1 (&addr, NULL_RTX, 0); | |
2277 | ||
2278 | if (! valid_only) | |
2279 | return; | |
2280 | ||
2281 | /* Now verify that the resulting address is valid for every integer or | |
2282 | floating-point mode up to and including SIZE bytes long. We do this | |
2283 | since the object might be accessed in any mode and frame addresses | |
2284 | are shared. */ | |
2285 | ||
2286 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2287 | mode != VOIDmode && GET_MODE_SIZE (mode) <= size; | |
2288 | mode = GET_MODE_WIDER_MODE (mode)) | |
2289 | if (! memory_address_p (mode, addr)) | |
2290 | return; | |
2291 | ||
2292 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); | |
2293 | mode != VOIDmode && GET_MODE_SIZE (mode) <= size; | |
2294 | mode = GET_MODE_WIDER_MODE (mode)) | |
2295 | if (! memory_address_p (mode, addr)) | |
2296 | return; | |
2297 | ||
2298 | /* Otherwise, put back the address, now that we have updated it and we | |
2299 | know it is valid. */ | |
2300 | ||
2301 | XEXP (x, 0) = addr; | |
2302 | } | |
2303 | \f | |
2304 | /* Given a pointer to a piece of rtx and an optional pointer to the | |
2305 | containing object, instantiate any virtual registers present in it. | |
2306 | ||
2307 | If EXTRA_INSNS, we always do the replacement and generate | |
2308 | any extra insns before OBJECT. If it is zero, we do nothing if replacement | |
2309 | is not valid. | |
2310 | ||
2311 | Return 1 if we either had nothing to do or if we were able to do the | |
2312 | needed replacement. Return 0 otherwise; we only return zero if | |
2313 | EXTRA_INSNS is zero. | |
2314 | ||
2315 | We first try some simple transformations to avoid the creation of extra | |
2316 | pseudos. */ | |
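 | /* A sketch of the common PLUS case, with a purely illustrative offset: | |
 | if var_offset were -16, (plus (reg virtual-stack-vars) (const_int 24)) | |
 | would be rewritten in place as (plus (reg fp) (const_int 8)) when the | |
 | containing insn still recognizes; otherwise, if EXTRA_INSNS, the | |
 | needed value is computed into a new pseudo by insns emitted before | |
 | OBJECT. */ | |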
2317 | ||
2318 | static int | |
2319 | instantiate_virtual_regs_1 (loc, object, extra_insns) | |
2320 | rtx *loc; | |
2321 | rtx object; | |
2322 | int extra_insns; | |
2323 | { | |
2324 | rtx x; | |
2325 | RTX_CODE code; | |
2326 | rtx new = 0; | |
2327 | int offset; | |
2328 | rtx temp; | |
2329 | rtx seq; | |
2330 | int i, j; | |
2331 | char *fmt; | |
2332 | ||
2333 | /* Re-start here to avoid recursion in common cases. */ | |
2334 | restart: | |
2335 | ||
2336 | x = *loc; | |
2337 | if (x == 0) | |
2338 | return 1; | |
2339 | ||
2340 | code = GET_CODE (x); | |
2341 | ||
2342 | /* Check for some special cases. */ | |
2343 | switch (code) | |
2344 | { | |
2345 | case CONST_INT: | |
2346 | case CONST_DOUBLE: | |
2347 | case CONST: | |
2348 | case SYMBOL_REF: | |
2349 | case CODE_LABEL: | |
2350 | case PC: | |
2351 | case CC0: | |
2352 | case ASM_INPUT: | |
2353 | case ADDR_VEC: | |
2354 | case ADDR_DIFF_VEC: | |
2355 | case RETURN: | |
2356 | return 1; | |
2357 | ||
2358 | case SET: | |
2359 | /* We are allowed to set the virtual registers. This means that | |
2360 | the actual register should receive the source minus the | |
2361 | appropriate offset. This is used, for example, in the handling | |
2362 | of non-local gotos. */ | |
2363 | if (SET_DEST (x) == virtual_incoming_args_rtx) | |
2364 | new = arg_pointer_rtx, offset = - in_arg_offset; | |
2365 | else if (SET_DEST (x) == virtual_stack_vars_rtx) | |
2366 | new = frame_pointer_rtx, offset = - var_offset; | |
2367 | else if (SET_DEST (x) == virtual_stack_dynamic_rtx) | |
2368 | new = stack_pointer_rtx, offset = - dynamic_offset; | |
2369 | else if (SET_DEST (x) == virtual_outgoing_args_rtx) | |
2370 | new = stack_pointer_rtx, offset = - out_arg_offset; | |
2371 | ||
2372 | if (new) | |
2373 | { | |
2374 | /* The only valid sources here are PLUS or REG. Just do | |
2375 | the simplest possible thing to handle them. */ | |
2376 | if (GET_CODE (SET_SRC (x)) != REG | |
2377 | && GET_CODE (SET_SRC (x)) != PLUS) | |
2378 | abort (); | |
2379 | ||
2380 | start_sequence (); | |
2381 | if (GET_CODE (SET_SRC (x)) != REG) | |
2382 | temp = force_operand (SET_SRC (x), NULL_RTX); | |
2383 | else | |
2384 | temp = SET_SRC (x); | |
2385 | temp = force_operand (plus_constant (temp, offset), NULL_RTX); | |
2386 | seq = get_insns (); | |
2387 | end_sequence (); | |
2388 | ||
2389 | emit_insns_before (seq, object); | |
2390 | SET_DEST (x) = new; | |
2391 | ||
2392 | if (!validate_change (object, &SET_SRC (x), temp, 0) | |
2393 | || ! extra_insns) | |
2394 | abort (); | |
2395 | ||
2396 | return 1; | |
2397 | } | |
2398 | ||
2399 | instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns); | |
2400 | loc = &SET_SRC (x); | |
2401 | goto restart; | |
2402 | ||
2403 | case PLUS: | |
2404 | /* Handle special case of virtual register plus constant. */ | |
2405 | if (CONSTANT_P (XEXP (x, 1))) | |
2406 | { | |
2407 | rtx old; | |
2408 | ||
2409 | /* Check for (plus (plus VIRT foo) (const_int)) first. */ | |
2410 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
2411 | { | |
2412 | rtx inner = XEXP (XEXP (x, 0), 0); | |
2413 | ||
2414 | if (inner == virtual_incoming_args_rtx) | |
2415 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2416 | else if (inner == virtual_stack_vars_rtx) | |
2417 | new = frame_pointer_rtx, offset = var_offset; | |
2418 | else if (inner == virtual_stack_dynamic_rtx) | |
2419 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2420 | else if (inner == virtual_outgoing_args_rtx) | |
2421 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2422 | else | |
2423 | { | |
2424 | loc = &XEXP (x, 0); | |
2425 | goto restart; | |
2426 | } | |
2427 | ||
2428 | instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object, | |
2429 | extra_insns); | |
2430 | new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1)); | |
2431 | } | |
2432 | ||
2433 | else if (XEXP (x, 0) == virtual_incoming_args_rtx) | |
2434 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2435 | else if (XEXP (x, 0) == virtual_stack_vars_rtx) | |
2436 | new = frame_pointer_rtx, offset = var_offset; | |
2437 | else if (XEXP (x, 0) == virtual_stack_dynamic_rtx) | |
2438 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2439 | else if (XEXP (x, 0) == virtual_outgoing_args_rtx) | |
2440 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2441 | else | |
2442 | { | |
2443 | /* We know the second operand is a constant. Unless the | |
2444 | first operand is a REG (which has already been checked), | |
2445 | it needs to be checked. */ | |
2446 | if (GET_CODE (XEXP (x, 0)) != REG) | |
2447 | { | |
2448 | loc = &XEXP (x, 0); | |
2449 | goto restart; | |
2450 | } | |
2451 | return 1; | |
2452 | } | |
2453 | ||
2454 | old = XEXP (x, 0); | |
2455 | XEXP (x, 0) = new; | |
2456 | new = plus_constant (XEXP (x, 1), offset); | |
2457 | ||
2458 | /* If the new constant is zero, try to replace the sum with its | |
2459 | first operand. */ | |
2460 | if (new == const0_rtx | |
2461 | && validate_change (object, loc, XEXP (x, 0), 0)) | |
2462 | return 1; | |
2463 | ||
2464 | /* Next try to replace constant with new one. */ | |
2465 | if (!validate_change (object, &XEXP (x, 1), new, 0)) | |
2466 | { | |
2467 | if (! extra_insns) | |
2468 | { | |
2469 | XEXP (x, 0) = old; | |
2470 | return 0; | |
2471 | } | |
2472 | ||
2473 | /* Otherwise copy the new constant into a register and replace | |
2474 | constant with that register. */ | |
2475 | temp = gen_reg_rtx (Pmode); | |
2476 | if (validate_change (object, &XEXP (x, 1), temp, 0)) | |
2477 | emit_insn_before (gen_move_insn (temp, new), object); | |
2478 | else | |
2479 | { | |
2480 | /* If that didn't work, replace this expression with a | |
2481 | register containing the sum. */ | |
2482 | ||
2483 | new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new); | |
2484 | XEXP (x, 0) = old; | |
2485 | ||
2486 | start_sequence (); | |
2487 | temp = force_operand (new, NULL_RTX); | |
2488 | seq = get_insns (); | |
2489 | end_sequence (); | |
2490 | ||
2491 | emit_insns_before (seq, object); | |
2492 | if (! validate_change (object, loc, temp, 0) | |
2493 | && ! validate_replace_rtx (x, temp, object)) | |
2494 | abort (); | |
2495 | } | |
2496 | } | |
2497 | ||
2498 | return 1; | |
2499 | } | |
2500 | ||
2501 | /* Fall through to generic two-operand expression case. */ | |
2502 | case EXPR_LIST: | |
2503 | case CALL: | |
2504 | case COMPARE: | |
2505 | case MINUS: | |
2506 | case MULT: | |
2507 | case DIV: case UDIV: | |
2508 | case MOD: case UMOD: | |
2509 | case AND: case IOR: case XOR: | |
2510 | case LSHIFT: case ASHIFT: case ROTATE: | |
2511 | case ASHIFTRT: case LSHIFTRT: case ROTATERT: | |
2512 | case NE: case EQ: | |
2513 | case GE: case GT: case GEU: case GTU: | |
2514 | case LE: case LT: case LEU: case LTU: | |
2515 | if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1))) | |
2516 | instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns); | |
2517 | loc = &XEXP (x, 0); | |
2518 | goto restart; | |
2519 | ||
2520 | case MEM: | |
2521 | /* Most cases of MEM that convert to valid addresses have already been | |
2522 | handled by our scan of regno_reg_rtx. The only special handling we | |
2523 | need here is to make a copy of the rtx to ensure it isn't being | |
2524 | shared if we have to change it to a pseudo. | |
2525 | ||
2526 | If the rtx is a simple reference to an address via a virtual register, | |
2527 | it can potentially be shared. In such cases, first try to make it | |
2528 | a valid address, which can also be shared. Otherwise, copy it and | |
2529 | proceed normally. | |
2530 | ||
2531 | First check for common cases that need no processing. These are | |
2532 | usually due to instantiation already being done on a previous instance | |
2533 | of a shared rtx. */ | |
2534 | ||
2535 | temp = XEXP (x, 0); | |
2536 | if (CONSTANT_ADDRESS_P (temp) | |
2537 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
2538 | || temp == arg_pointer_rtx | |
2539 | #endif | |
2540 | || temp == frame_pointer_rtx) | |
2541 | return 1; | |
2542 | ||
2543 | if (GET_CODE (temp) == PLUS | |
2544 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
2545 | && (XEXP (temp, 0) == frame_pointer_rtx | |
2546 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
2547 | || XEXP (temp, 0) == arg_pointer_rtx | |
2548 | #endif | |
2549 | )) | |
2550 | return 1; | |
2551 | ||
2552 | if (temp == virtual_stack_vars_rtx | |
2553 | || temp == virtual_incoming_args_rtx | |
2554 | || (GET_CODE (temp) == PLUS | |
2555 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
2556 | && (XEXP (temp, 0) == virtual_stack_vars_rtx | |
2557 | || XEXP (temp, 0) == virtual_incoming_args_rtx))) | |
2558 | { | |
2559 | /* This MEM may be shared. If the substitution can be done without | |
2560 | the need to generate new pseudos, we want to do it in place | |
2561 | so all copies of the shared rtx benefit. The call below will | |
2562 | only make substitutions if the resulting address is still | |
2563 | valid. | |
2564 | ||
2565 | Note that we cannot pass X as the object in the recursive call | |
2566 | since the insn being processed may not allow all valid | |
2567 | addresses. However, if we were not passed an object, we can | |
2568 | only modify X without copying it if X will have a valid | |
2569 | address. | |
2570 | ||
2571 | ??? Also note that this can still lose if OBJECT is an insn that | |
2572 | has fewer restrictions on an address than some other insn. | |
2573 | In that case, we will modify the shared address. This case | |
2574 | doesn't seem very likely, though. */ | |
2575 | ||
2576 | if (instantiate_virtual_regs_1 (&XEXP (x, 0), | |
2577 | object ? object : x, 0)) | |
2578 | return 1; | |
2579 | ||
2580 | /* Otherwise make a copy and process that copy. We copy the entire | |
2581 | RTL expression since it might be a PLUS which could also be | |
2582 | shared. */ | |
2583 | *loc = x = copy_rtx (x); | |
2584 | } | |
2585 | ||
2586 | /* Fall through to generic unary operation case. */ | |
2587 | case USE: | |
2588 | case CLOBBER: | |
2589 | case SUBREG: | |
2590 | case STRICT_LOW_PART: | |
2591 | case NEG: case NOT: | |
2592 | case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC: | |
2593 | case SIGN_EXTEND: case ZERO_EXTEND: | |
2594 | case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: | |
2595 | case FLOAT: case FIX: | |
2596 | case UNSIGNED_FIX: case UNSIGNED_FLOAT: | |
2597 | case ABS: | |
2598 | case SQRT: | |
2599 | case FFS: | |
2600 | /* These cases either have just one operand or we know that we need not | |
2601 | check the rest of the operands. */ | |
2602 | loc = &XEXP (x, 0); | |
2603 | goto restart; | |
2604 | ||
2605 | case REG: | |
2606 | /* Try to replace with a PLUS. If that doesn't work, compute the sum | |
2607 | in front of this insn and substitute the temporary. */ | |
2608 | if (x == virtual_incoming_args_rtx) | |
2609 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2610 | else if (x == virtual_stack_vars_rtx) | |
2611 | new = frame_pointer_rtx, offset = var_offset; | |
2612 | else if (x == virtual_stack_dynamic_rtx) | |
2613 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2614 | else if (x == virtual_outgoing_args_rtx) | |
2615 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2616 | ||
2617 | if (new) | |
2618 | { | |
2619 | temp = plus_constant (new, offset); | |
2620 | if (!validate_change (object, loc, temp, 0)) | |
2621 | { | |
2622 | if (! extra_insns) | |
2623 | return 0; | |
2624 | ||
2625 | start_sequence (); | |
2626 | temp = force_operand (temp, NULL_RTX); | |
2627 | seq = get_insns (); | |
2628 | end_sequence (); | |
2629 | ||
2630 | emit_insns_before (seq, object); | |
2631 | if (! validate_change (object, loc, temp, 0) | |
2632 | && ! validate_replace_rtx (x, temp, object)) | |
2633 | abort (); | |
2634 | } | |
2635 | } | |
2636 | ||
2637 | return 1; | |
2638 | } | |
2639 | ||
2640 | /* Scan all subexpressions. */ | |
2641 | fmt = GET_RTX_FORMAT (code); | |
2642 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
2643 | if (*fmt == 'e') | |
2644 | { | |
2645 | if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns)) | |
2646 | return 0; | |
2647 | } | |
2648 | else if (*fmt == 'E') | |
2649 | for (j = 0; j < XVECLEN (x, i); j++) | |
2650 | if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object, | |
2651 | extra_insns)) | |
2652 | return 0; | |
2653 | ||
2654 | return 1; | |
2655 | } | |
2656 | \f | |
2657 | /* Optimization: assuming this function does not receive nonlocal gotos, | |
2658 | delete the handlers for such, as well as the insns to establish | |
2659 | and disestablish them. */ | |
2660 | ||
2661 | static void | |
2662 | delete_handlers () | |
2663 | { | |
2664 | rtx insn; | |
2665 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2666 | { | |
2667 | /* Delete the handler by turning off the flag that would | |
2668 | prevent jump_optimize from deleting it. | |
2669 | Also permit deletion of the nonlocal labels themselves | |
2670 | if nothing local refers to them. */ | |
2671 | if (GET_CODE (insn) == CODE_LABEL) | |
2672 | LABEL_PRESERVE_P (insn) = 0; | |
2673 | if (GET_CODE (insn) == INSN | |
2674 | && ((nonlocal_goto_handler_slot != 0 | |
2675 | && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn))) | |
2676 | || (nonlocal_goto_stack_level != 0 | |
2677 | && reg_mentioned_p (nonlocal_goto_stack_level, | |
2678 | PATTERN (insn))))) | |
2679 | delete_insn (insn); | |
2680 | } | |
2681 | } | |
2682 | ||
2683 | /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels | |
2684 | of the current function. */ | |
2685 | ||
2686 | rtx | |
2687 | nonlocal_label_rtx_list () | |
2688 | { | |
2689 | tree t; | |
2690 | rtx x = 0; | |
2691 | ||
2692 | for (t = nonlocal_labels; t; t = TREE_CHAIN (t)) | |
2693 | x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x); | |
2694 | ||
2695 | return x; | |
2696 | } | |
2697 | \f | |
2698 | /* Output a USE for any register use in RTL. | |
2699 | This is used with -noreg to mark the extent of lifespan | |
2700 | of any registers used in a user-visible variable's DECL_RTL. */ | |
2701 | ||
2702 | void | |
2703 | use_variable (rtl) | |
2704 | rtx rtl; | |
2705 | { | |
2706 | if (GET_CODE (rtl) == REG) | |
2707 | /* This is a register variable. */ | |
2708 | emit_insn (gen_rtx (USE, VOIDmode, rtl)); | |
2709 | else if (GET_CODE (rtl) == MEM | |
2710 | && GET_CODE (XEXP (rtl, 0)) == REG | |
2711 | && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER | |
2712 | || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER) | |
2713 | && XEXP (rtl, 0) != current_function_internal_arg_pointer) | |
2714 | /* This is a variable-sized structure. */ | |
2715 | emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0))); | |
2716 | } | |
2717 | ||
2718 | /* Like use_variable except that it outputs the USEs after INSN | |
2719 | instead of at the end of the insn-chain. */ | |
2720 | ||
2721 | void | |
2722 | use_variable_after (rtl, insn) | |
2723 | rtx rtl, insn; | |
2724 | { | |
2725 | if (GET_CODE (rtl) == REG) | |
2726 | /* This is a register variable. */ | |
2727 | emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn); | |
2728 | else if (GET_CODE (rtl) == MEM | |
2729 | && GET_CODE (XEXP (rtl, 0)) == REG | |
2730 | && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER | |
2731 | || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER) | |
2732 | && XEXP (rtl, 0) != current_function_internal_arg_pointer) | |
2733 | /* This is a variable-sized structure. */ | |
2734 | emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn); | |
2735 | } | |
2736 | \f | |
2737 | int | |
2738 | max_parm_reg_num () | |
2739 | { | |
2740 | return max_parm_reg; | |
2741 | } | |
2742 | ||
2743 | /* Return the first insn following those generated by `assign_parms'. */ | |
2744 | ||
2745 | rtx | |
2746 | get_first_nonparm_insn () | |
2747 | { | |
2748 | if (last_parm_insn) | |
2749 | return NEXT_INSN (last_parm_insn); | |
2750 | return get_insns (); | |
2751 | } | |
2752 | ||
2753 | /* Return the first NOTE_INSN_BLOCK_BEG note in the function. | |
2754 | Crash if there is none. */ | |
2755 | ||
2756 | rtx | |
2757 | get_first_block_beg () | |
2758 | { | |
2759 | register rtx searcher; | |
2760 | register rtx insn = get_first_nonparm_insn (); | |
2761 | ||
2762 | for (searcher = insn; searcher; searcher = NEXT_INSN (searcher)) | |
2763 | if (GET_CODE (searcher) == NOTE | |
2764 | && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG) | |
2765 | return searcher; | |
2766 | ||
2767 | abort (); /* Invalid call to this function. (See comments above.) */ | |
2768 | return NULL_RTX; | |
2769 | } | |
2770 | ||
2771 | /* Return 1 if EXP returns an aggregate value, for which an address | |
2772 | must be passed to the function or returned by the function. */ | |
2773 | ||
2774 | int | |
2775 | aggregate_value_p (exp) | |
2776 | tree exp; | |
2777 | { | |
2778 | int i, regno, nregs; | |
2779 | rtx reg; | |
2780 | if (RETURN_IN_MEMORY (TREE_TYPE (exp))) | |
2781 | return 1; | |
2782 | if (flag_pcc_struct_return | |
2783 | && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE | |
2784 | || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE | |
2785 | || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE | |
2786 | || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE)) | |
2787 | return 1; | |
2788 | /* Make sure we have suitable call-clobbered regs to return | |
2789 | the value in; if not, we must return it in memory. */ | |
2790 | reg = hard_function_value (TREE_TYPE (exp), 0); | |
2791 | regno = REGNO (reg); | |
2792 | nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp))); | |
2793 | for (i = 0; i < nregs; i++) | |
2794 | if (! call_used_regs[regno + i]) | |
2795 | return 1; | |
2796 | return 0; | |
2797 | } | |
2798 | \f | |
2799 | /* Assign RTL expressions to the function's parameters. | |
2800 | This may involve copying them into registers and using | |
2801 | those registers as the RTL for them. | |
2802 | ||
2803 | If SECOND_TIME is non-zero it means that this function is being | |
2804 | called a second time. This is done by integrate.c when a function's | |
2805 | compilation is deferred. We need to come back here in case the | |
2806 | FUNCTION_ARG macro computes items needed for the rest of the compilation | |
2807 | (such as changing which registers are fixed or caller-saved). But suppress | |
2808 | writing any insns or setting DECL_RTL of anything in this case. */ | |
2809 | ||
2810 | void | |
2811 | assign_parms (fndecl, second_time) | |
2812 | tree fndecl; | |
2813 | int second_time; | |
2814 | { | |
2815 | register tree parm; | |
2816 | register rtx entry_parm = 0; | |
2817 | register rtx stack_parm = 0; | |
2818 | CUMULATIVE_ARGS args_so_far; | |
2819 | enum machine_mode promoted_mode, passed_mode, nominal_mode; | |
2820 | int unsignedp; | |
2821 | /* Total space needed so far for args on the stack, | |
2822 | given as a constant and a tree-expression. */ | |
2823 | struct args_size stack_args_size; | |
2824 | tree fntype = TREE_TYPE (fndecl); | |
2825 | tree fnargs = DECL_ARGUMENTS (fndecl); | |
2826 | /* This is used for the arg pointer when referring to stack args. */ | |
2827 | rtx internal_arg_pointer; | |
2828 | /* This is a dummy PARM_DECL that we use for the function result if | |
2829 | the function returns a structure. */ | |
2830 | tree function_result_decl = 0; | |
2831 | int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1; | |
2832 | int varargs_setup = 0; | |
2833 | rtx conversion_insns = 0; | |
2834 | /* FUNCTION_ARG may look at this variable. Since we are not | |
2835 | expanding a call, it will always be zero in this function. */ | |
2836 | int current_call_is_indirect = 0; | |
2837 | ||
2838 | /* Nonzero if the last arg is named `__builtin_va_alist', | |
2839 | which is used on some machines for old-fashioned non-ANSI varargs.h; | |
2840 | this should be stuck onto the stack as if it had arrived there. */ | |
2841 | int vararg | |
2842 | = (fnargs | |
2843 | && (parm = tree_last (fnargs)) != 0 | |
2844 | && DECL_NAME (parm) | |
2845 | && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)), | |
2846 | "__builtin_va_alist"))); | |
2847 | ||
2848 | /* Nonzero if function takes extra anonymous args. | |
2849 | This means the last named arg must be on the stack | |
2850 | right before the anonymous ones. */ | |
2851 | int stdarg | |
2852 | = (TYPE_ARG_TYPES (fntype) != 0 | |
2853 | && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
2854 | != void_type_node)); | |
2855 | ||
2856 | /* If the reg that the virtual arg pointer will be translated into is | |
2857 | not a fixed reg or is the stack pointer, make a copy of the virtual | |
2858 | arg pointer, and address parms via the copy. The frame pointer is | |
2859 | considered fixed even though it is not marked as such. | |
2860 | ||
2861 | The second time through, simply use ap to avoid generating rtx. */ | |
2862 | ||
2863 | if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM | |
2864 | || ! (fixed_regs[ARG_POINTER_REGNUM] | |
2865 | || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)) | |
2866 | && ! second_time) | |
2867 | internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx); | |
2868 | else | |
2869 | internal_arg_pointer = virtual_incoming_args_rtx; | |
2870 | current_function_internal_arg_pointer = internal_arg_pointer; | |
2871 | ||
2872 | stack_args_size.constant = 0; | |
2873 | stack_args_size.var = 0; | |
2874 | ||
2875 | /* If struct value address is treated as the first argument, make it so. */ | |
2876 | if (aggregate_value_p (DECL_RESULT (fndecl)) | |
2877 | && ! current_function_returns_pcc_struct | |
2878 | && struct_value_incoming_rtx == 0) | |
2879 | { | |
2880 | tree type = build_pointer_type (fntype); | |
2881 | ||
2882 | function_result_decl = build_decl (PARM_DECL, NULL_TREE, type); | |
2883 | ||
2884 | DECL_ARG_TYPE (function_result_decl) = type; | |
2885 | TREE_CHAIN (function_result_decl) = fnargs; | |
2886 | fnargs = function_result_decl; | |
2887 | } | |
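/* Rough picture of what the dummy parm models, assuming a target without a
   struct_value_incoming_rtx register: a definition such as
   `struct S f () { ... }' is treated roughly as if it were
   `void f (struct S *hidden) { ... }', and FUNCTION_RESULT_DECL stands for
   that hidden pointer so it gets laid out like any other parameter below.  */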
2888 | ||
2889 | parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
2890 | bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx)); | |
2891 | ||
2892 | #ifdef INIT_CUMULATIVE_INCOMING_ARGS | |
2893 | INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX); | |
2894 | #else | |
2895 | INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX); | |
2896 | #endif | |
2897 | ||
2898 | /* We haven't yet found an argument that we must push and pretend the | |
2899 | caller did. */ | |
2900 | current_function_pretend_args_size = 0; | |
2901 | ||
2902 | for (parm = fnargs; parm; parm = TREE_CHAIN (parm)) | |
2903 | { | |
2904 | int aggregate | |
2905 | = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE | |
2906 | || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE | |
2907 | || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE | |
2908 | || TREE_CODE (TREE_TYPE (parm)) == QUAL_UNION_TYPE); | |
2909 | struct args_size stack_offset; | |
2910 | struct args_size arg_size; | |
2911 | int passed_pointer = 0; | |
2912 | tree passed_type = DECL_ARG_TYPE (parm); | |
2913 | ||
2914 | /* Set LAST_NAMED if this is the last named arg before some | |
2915 | anonymous args. We treat it as if it were anonymous too. */ | |
2916 | int last_named = ((TREE_CHAIN (parm) == 0 | |
2917 | || DECL_NAME (TREE_CHAIN (parm)) == 0) | |
2918 | && (vararg || stdarg)); | |
2919 | ||
2920 | if (TREE_TYPE (parm) == error_mark_node | |
2921 | /* This can happen after weird syntax errors | |
2922 | or if an enum type is defined among the parms. */ | |
2923 | || TREE_CODE (parm) != PARM_DECL | |
2924 | || passed_type == NULL) | |
2925 | { | |
2926 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode, | |
2927 | const0_rtx); | |
2928 | TREE_USED (parm) = 1; | |
2929 | continue; | |
2930 | } | |
2931 | ||
2932 | /* For a varargs.h function, save info about regs and stack space | |
2933 | used by the individual args, not including the va_alist arg. */ | |
2934 | if (vararg && last_named) | |
2935 | current_function_args_info = args_so_far; | |
2936 | ||
2937 | /* Find mode of arg as it is passed, and mode of arg | |
2938 | as it should be during execution of this function. */ | |
2939 | passed_mode = TYPE_MODE (passed_type); | |
2940 | nominal_mode = TYPE_MODE (TREE_TYPE (parm)); | |
2941 | ||
2942 | /* If the parm's mode is VOID, its value doesn't matter, | |
2943 | so avoid the usual things like emit_move_insn that could crash. */ | |
2944 | if (nominal_mode == VOIDmode) | |
2945 | { | |
2946 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx; | |
2947 | continue; | |
2948 | } | |
2949 | ||
2950 | #ifdef FUNCTION_ARG_PASS_BY_REFERENCE | |
2951 | /* See if this arg was passed by invisible reference. */ | |
2952 | if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode, | |
2953 | passed_type, ! last_named)) | |
2954 | { | |
2955 | passed_type = build_pointer_type (passed_type); | |
2956 | passed_pointer = 1; | |
2957 | passed_mode = nominal_mode = Pmode; | |
2958 | } | |
2959 | #endif | |
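/* Hedged example (whether this ever happens is entirely up to the target's
   FUNCTION_ARG_PASS_BY_REFERENCE definition): a target might pass a large or
   variable-sized struct by giving the callee the address of the caller's
   copy.  The code above then rewrites PASSED_TYPE to the pointer type and
   both PASSED_MODE and NOMINAL_MODE become Pmode; the pointed-to value is
   handled later via PASSED_POINTER.  */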
2960 | ||
2961 | promoted_mode = passed_mode; | |
2962 | ||
2963 | #ifdef PROMOTE_FUNCTION_ARGS | |
2964 | /* Compute the mode to which the arg is actually extended. */ | |
2965 | if (TREE_CODE (passed_type) == INTEGER_TYPE | |
2966 | || TREE_CODE (passed_type) == ENUMERAL_TYPE | |
2967 | || TREE_CODE (passed_type) == BOOLEAN_TYPE | |
2968 | || TREE_CODE (passed_type) == CHAR_TYPE | |
2969 | || TREE_CODE (passed_type) == REAL_TYPE | |
2970 | || TREE_CODE (passed_type) == POINTER_TYPE | |
2971 | || TREE_CODE (passed_type) == OFFSET_TYPE) | |
2972 | { | |
2973 | unsignedp = TREE_UNSIGNED (passed_type); | |
2974 | PROMOTE_MODE (promoted_mode, unsignedp, passed_type); | |
2975 | } | |
2976 | #endif | |
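/* Hedged example of what PROMOTE_MODE may do here: on a target that promotes
   small integers, a `short' parm (nominal HImode) would yield PROMOTED_MODE
   == SImode with UNSIGNEDP taken from the type, while NOMINAL_MODE stays
   HImode.  The particular modes are only an assumption for illustration.  */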
2977 | ||
2978 | /* Let machine desc say which reg (if any) the parm arrives in. | |
2979 | 0 means it arrives on the stack. */ | |
2980 | #ifdef FUNCTION_INCOMING_ARG | |
2981 | entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode, | |
2982 | passed_type, ! last_named); | |
2983 | #else | |
2984 | entry_parm = FUNCTION_ARG (args_so_far, promoted_mode, | |
2985 | passed_type, ! last_named); | |
2986 | #endif | |
2987 | ||
2988 | if (entry_parm) | |
2989 | passed_mode = promoted_mode; | |
2990 | ||
2991 | #ifdef SETUP_INCOMING_VARARGS | |
2992 | /* If this is the last named parameter, do any required setup for | |
2993 | varargs or stdargs. We need to know about the case of this being an | |
2994 | addressable type, in which case we skip the registers it | |
2995 | would have arrived in. | |
2996 | ||
2997 | For stdargs, LAST_NAMED will be set for two parameters, the one that | |
2998 | is actually the last named, and the dummy parameter. We only | |
2999 | want to do this action once. | |
3000 | ||
3001 | Also, indicate when RTL generation is to be suppressed. */ | |
3002 | if (last_named && !varargs_setup) | |
3003 | { | |
3004 | SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type, | |
3005 | current_function_pretend_args_size, | |
3006 | second_time); | |
3007 | varargs_setup = 1; | |
3008 | } | |
3009 | #endif | |
3010 | ||
3011 | /* Determine parm's home in the stack, | |
3012 | in case it arrives in the stack or we should pretend it did. | |
3013 | ||
3014 | Compute the stack position and rtx where the argument arrives | |
3015 | and its size. | |
3016 | ||
3017 | There is one complexity here: If this was a parameter that would | |
3018 | have been passed in registers, but wasn't only because it is | |
3019 | __builtin_va_alist, we want locate_and_pad_parm to treat it as if | |
3020 | it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | |
3021 | In this case, we call FUNCTION_ARG with NAMED set to 1 instead of | |
3022 | 0 as it was the previous time. */ | |
3023 | ||
3024 | locate_and_pad_parm (passed_mode, passed_type, | |
3025 | #ifdef STACK_PARMS_IN_REG_PARM_AREA | |
3026 | 1, | |
3027 | #else | |
3028 | #ifdef FUNCTION_INCOMING_ARG | |
3029 | FUNCTION_INCOMING_ARG (args_so_far, passed_mode, | |
3030 | passed_type, | |
3031 | (! last_named | |
3032 | || varargs_setup)) != 0, | |
3033 | #else | |
3034 | FUNCTION_ARG (args_so_far, passed_mode, | |
3035 | passed_type, | |
3036 | ! last_named || varargs_setup) != 0, | |
3037 | #endif | |
3038 | #endif | |
3039 | fndecl, &stack_args_size, &stack_offset, &arg_size); | |
3040 | ||
3041 | if (! second_time) | |
3042 | { | |
3043 | rtx offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
3044 | ||
3045 | if (offset_rtx == const0_rtx) | |
3046 | stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer); | |
3047 | else | |
3048 | stack_parm = gen_rtx (MEM, passed_mode, | |
3049 | gen_rtx (PLUS, Pmode, | |
3050 | internal_arg_pointer, offset_rtx)); | |
3051 | ||
3052 | /* If this is a memory ref that contains aggregate components, | |
3053 | mark it as such for cse and loop optimize. */ | |
3054 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3055 | } | |
3056 | ||
3057 | /* If this parameter was passed both in registers and in the stack, | |
3058 | use the copy on the stack. */ | |
3059 | if (MUST_PASS_IN_STACK (passed_mode, passed_type)) | |
3060 | entry_parm = 0; | |
3061 | ||
3062 | #ifdef FUNCTION_ARG_PARTIAL_NREGS | |
3063 | /* If this parm was passed part in regs and part in memory, | |
3064 | pretend it arrived entirely in memory | |
3065 | by pushing the register-part onto the stack. | |
3066 | ||
3067 | In the special case of a DImode or DFmode that is split, | |
3068 | we could put it together in a pseudoreg directly, | |
3069 | but for now that's not worth bothering with. */ | |
3070 | ||
3071 | if (entry_parm) | |
3072 | { | |
3073 | int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode, | |
3074 | passed_type, ! last_named); | |
3075 | ||
3076 | if (nregs > 0) | |
3077 | { | |
3078 | current_function_pretend_args_size | |
3079 | = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1) | |
3080 | / (PARM_BOUNDARY / BITS_PER_UNIT) | |
3081 | * (PARM_BOUNDARY / BITS_PER_UNIT)); | |
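/* Worked example of the rounding above, with assumed target values
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 (8 bytes): for NREGS == 3 the
   register part occupies 12 bytes, and (12 + 8 - 1) / 8 * 8 == 16, so 16
   bytes of pretend args are recorded.  */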
3082 | ||
3083 | if (! second_time) | |
3084 | move_block_from_reg (REGNO (entry_parm), | |
3085 | validize_mem (stack_parm), nregs, | |
3086 | int_size_in_bytes (TREE_TYPE (parm))); | |
3087 | entry_parm = stack_parm; | |
3088 | } | |
3089 | } | |
3090 | #endif | |
3091 | ||
3092 | /* If we didn't decide this parm came in a register, | |
3093 | by default it came on the stack. */ | |
3094 | if (entry_parm == 0) | |
3095 | entry_parm = stack_parm; | |
3096 | ||
3097 | /* Record permanently how this parm was passed. */ | |
3098 | if (! second_time) | |
3099 | DECL_INCOMING_RTL (parm) = entry_parm; | |
3100 | ||
3101 | /* If there is actually space on the stack for this parm, | |
3102 | count it in stack_args_size; otherwise set stack_parm to 0 | |
3103 | to indicate there is no preallocated stack slot for the parm. */ | |
3104 | ||
3105 | if (entry_parm == stack_parm | |
3106 | #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE) | |
3107 | /* On some machines, even if a parm value arrives in a register | |
3108 | there is still an (uninitialized) stack slot allocated for it. | |
3109 | ||
3110 | ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell | |
3111 | whether this parameter already has a stack slot allocated, | |
3112 | because an arg block exists only if current_function_args_size | |
3113 | is larger than some threshold, and we haven't calculated that | |
3114 | yet. So, for now, we just assume that stack slots never exist | |
3115 | in this case. */ | |
3116 | || REG_PARM_STACK_SPACE (fndecl) > 0 | |
3117 | #endif | |
3118 | ) | |
3119 | { | |
3120 | stack_args_size.constant += arg_size.constant; | |
3121 | if (arg_size.var) | |
3122 | ADD_PARM_SIZE (stack_args_size, arg_size.var); | |
3123 | } | |
3124 | else | |
3125 | /* No stack slot was pushed for this parm. */ | |
3126 | stack_parm = 0; | |
3127 | ||
3128 | /* Update info on where next arg arrives in registers. */ | |
3129 | ||
3130 | FUNCTION_ARG_ADVANCE (args_so_far, passed_mode, | |
3131 | passed_type, ! last_named); | |
3132 | ||
3133 | /* If this is our second time through, we are done with this parm. */ | |
3134 | if (second_time) | |
3135 | continue; | |
3136 | ||
3137 | /* If we can't trust the parm stack slot to be aligned enough | |
3138 | for its ultimate type, don't use that slot after entry. | |
3139 | We'll make another stack slot, if we need one. */ | |
3140 | { | |
3141 | int thisparm_boundary | |
3142 | = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type); | |
3143 | ||
3144 | if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary) | |
3145 | stack_parm = 0; | |
3146 | } | |
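/* Illustrative case (the numbers are assumptions, not target requirements):
   if the slot was laid out with a FUNCTION_ARG_BOUNDARY of 32 bits but
   NOMINAL_MODE is DFmode with a GET_MODE_ALIGNMENT of 64, the test above
   clears STACK_PARM, and a properly aligned slot is made later if needed.  */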
3147 | ||
3148 | /* If parm was passed in memory, and we need to convert it on entry, | |
3149 | don't store it back in that same slot. */ | |
3150 | if (entry_parm != 0 | |
3151 | && nominal_mode != BLKmode && nominal_mode != passed_mode) | |
3152 | stack_parm = 0; | |
3153 | ||
3154 | #if 0 | |
3155 | /* Now adjust STACK_PARM to the mode and precise location | |
3156 | where this parameter should live during execution, | |
3157 | if we discover that it must live in the stack during execution. | |
3158 | To make debuggers happier on big-endian machines, we store | |
3159 | the value in the last bytes of the space available. */ | |
3160 | ||
3161 | if (nominal_mode != BLKmode && nominal_mode != passed_mode | |
3162 | && stack_parm != 0) | |
3163 | { | |
3164 | rtx offset_rtx; | |
3165 | ||
3166 | #if BYTES_BIG_ENDIAN | |
3167 | if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD) | |
3168 | stack_offset.constant += (GET_MODE_SIZE (passed_mode) | |
3169 | - GET_MODE_SIZE (nominal_mode)); | |
3170 | #endif | |
3171 | ||
3172 | offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
3173 | if (offset_rtx == const0_rtx) | |
3174 | stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer); | |
3175 | else | |
3176 | stack_parm = gen_rtx (MEM, nominal_mode, | |
3177 | gen_rtx (PLUS, Pmode, | |
3178 | internal_arg_pointer, offset_rtx)); | |
3179 | ||
3180 | /* If this is a memory ref that contains aggregate components, | |
3181 | mark it as such for cse and loop optimize. */ | |
3182 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3183 | } | |
3184 | #endif /* 0 */ | |
3185 | ||
3186 | /* ENTRY_PARM is an RTX for the parameter as it arrives, | |
3187 | in the mode in which it arrives. | |
3188 | STACK_PARM is an RTX for a stack slot where the parameter can live | |
3189 | during the function (in case we want to put it there). | |
3190 | STACK_PARM is 0 if no stack slot was pushed for it. | |
3191 | ||
3192 | Now output code if necessary to convert ENTRY_PARM to | |
3193 | the type in which this function declares it, | |
3194 | and store that result in an appropriate place, | |
3195 | which may be a pseudo reg, may be STACK_PARM, | |
3196 | or may be a local stack slot if STACK_PARM is 0. | |
3197 | ||
3198 | Set DECL_RTL to that place. */ | |
3199 | ||
3200 | if (nominal_mode == BLKmode) | |
3201 | { | |
3202 | /* If a BLKmode arrives in registers, copy it to a stack slot. */ | |
3203 | if (GET_CODE (entry_parm) == REG) | |
3204 | { | |
3205 | int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)), | |
3206 | UNITS_PER_WORD); | |
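/* For instance, assuming UNITS_PER_WORD == 4, a 10-byte struct gives
   SIZE_STORED == CEIL_ROUND (10, 4) == 12, i.e. three whole words, so
   move_block_from_reg below can copy full words.  */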
3207 | ||
3208 | /* Note that we will be storing an integral number of words. | |
3209 | So we have to be careful to ensure that we allocate an | |
3210 | integral number of words. We do this below in the | |
3211 | assign_stack_local if space was not allocated in the argument | |
3212 | list. If it was, this will not work if PARM_BOUNDARY is not | |
3213 | a multiple of BITS_PER_WORD. It isn't clear how to fix this | |
3214 | if it becomes a problem. */ | |
3215 | ||
3216 | if (stack_parm == 0) | |
3217 | { | |
3218 | stack_parm | |
3219 | = assign_stack_local (GET_MODE (entry_parm), size_stored, 0); | |
3220 | /* If this is a memory ref that contains aggregate components, | |
3221 | mark it as such for cse and loop optimize. */ | |
3222 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3223 | } | |
3224 | ||
3225 | else if (PARM_BOUNDARY % BITS_PER_WORD != 0) | |
3226 | abort (); | |
3227 | ||
3228 | move_block_from_reg (REGNO (entry_parm), | |
3229 | validize_mem (stack_parm), | |
3230 | size_stored / UNITS_PER_WORD, | |
3231 | int_size_in_bytes (TREE_TYPE (parm))); | |
3232 | } | |
3233 | DECL_RTL (parm) = stack_parm; | |
3234 | } | |
3235 | else if (! ((obey_regdecls && ! DECL_REGISTER (parm) | |
3236 | && ! DECL_INLINE (fndecl)) | |
3237 | /* layout_decl may set this. */ | |
3238 | || TREE_ADDRESSABLE (parm) | |
3239 | || TREE_SIDE_EFFECTS (parm) | |
3240 | /* If -ffloat-store specified, don't put explicit | |
3241 | float variables into registers. */ | |
3242 | || (flag_float_store | |
3243 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)) | |
3244 | /* Always assign pseudo to structure return or item passed | |
3245 | by invisible reference. */ | |
3246 | || passed_pointer || parm == function_result_decl) | |
3247 | { | |
3248 | /* Store the parm in a pseudoregister during the function, but we | |
3249 | may need to do it in a wider mode. */ | |
3250 | ||
3251 | register rtx parmreg; | |
3252 | ||
3253 | unsignedp = TREE_UNSIGNED (TREE_TYPE (parm)); | |
3254 | if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE | |
3255 | || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE | |
3256 | || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE | |
3257 | || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE | |
3258 | || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE | |
3259 | || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE | |
3260 | || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE) | |
3261 | { | |
3262 | PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm)); | |
3263 | } | |
3264 | ||
3265 | parmreg = gen_reg_rtx (nominal_mode); | |
3266 | REG_USERVAR_P (parmreg) = 1; | |
3267 | ||
3268 | /* If this was an item that we received a pointer to, set DECL_RTL | |
3269 | appropriately. */ | |
3270 | if (passed_pointer) | |
3271 | { | |
3272 | DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg); | |
3273 | MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate; | |
3274 | } | |
3275 | else | |
3276 | DECL_RTL (parm) = parmreg; | |
3277 | ||
3278 | /* Copy the value into the register. */ | |
3279 | if (GET_MODE (parmreg) != GET_MODE (entry_parm)) | |
3280 | { | |
3281 | /* If ENTRY_PARM is a hard register, it might be in a register | |
3282 | not valid for operating in its mode (e.g., an odd-numbered | |
3283 | register for a DFmode). In that case, moves are the only | |
3284 | thing valid, so we can't do a convert from there. This | |
3285 | occurs when the calling sequence allow such misaligned | |
3286 | usages. | |
3287 | ||
3288 | In addition, the conversion may involve a call, which could | |
3289 | clobber parameters which haven't been copied to pseudo | |
3290 | registers yet. Therefore, we must first copy the parm to | |
3291 | a pseudo reg here, and save the conversion until after all | |
3292 | parameters have been moved. */ | |
3293 | ||
3294 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
3295 | ||
3296 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
3297 | ||
3298 | push_to_sequence (conversion_insns); | |
3299 | convert_move (parmreg, tempreg, unsignedp); | |
3300 | conversion_insns = get_insns (); | |
3301 | end_sequence (); | |
3302 | } | |
3303 | else | |
3304 | emit_move_insn (parmreg, validize_mem (entry_parm)); | |
3305 | ||
3306 | /* If we were passed a pointer but the actual value | |
3307 | can safely live in a register, put it in one. */ | |
3308 | if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode | |
3309 | && ! ((obey_regdecls && ! DECL_REGISTER (parm) | |
3310 | && ! DECL_INLINE (fndecl)) | |
3311 | /* layout_decl may set this. */ | |
3312 | || TREE_ADDRESSABLE (parm) | |
3313 | || TREE_SIDE_EFFECTS (parm) | |
3314 | /* If -ffloat-store specified, don't put explicit | |
3315 | float variables into registers. */ | |
3316 | || (flag_float_store | |
3317 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))) | |
3318 | { | |
3319 | /* We can't use nominal_mode, because it will have been set to | |
3320 | Pmode above. We must use the actual mode of the parm. */ | |
3321 | parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); | |
3322 | emit_move_insn (parmreg, DECL_RTL (parm)); | |
3323 | DECL_RTL (parm) = parmreg; | |
3324 | } | |
3325 | #ifdef FUNCTION_ARG_CALLEE_COPIES | |
3326 | /* If we are passed an arg by reference and it is our responsibility | |
3327 | to make a copy, do it now. | |
3328 | PASSED_TYPE and PASSED_MODE now refer to the pointer, not the | |
3329 | original argument, so we must recreate them in the call to | |
3330 | FUNCTION_ARG_CALLEE_COPIES. */ | |
3331 | /* ??? Later add code to skip the copy when the argument isn't | |
3332 | modified. */ | |
3333 | ||
3334 | else if (passed_pointer | |
3335 | && FUNCTION_ARG_CALLEE_COPIES (args_so_far, | |
3336 | TYPE_MODE (DECL_ARG_TYPE (parm)), | |
3337 | DECL_ARG_TYPE (parm), | |
3338 | ! last_named)) | |
3339 | { | |
3340 | rtx copy; | |
3341 | tree type = DECL_ARG_TYPE (parm); | |
3342 | ||
3343 | /* This sequence may involve a library call perhaps clobbering | |
3344 | registers that haven't been copied to pseudos yet. */ | |
3345 | ||
3346 | push_to_sequence (conversion_insns); | |
3347 | ||
3348 | if (TYPE_SIZE (type) == 0 | |
3349 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) | |
3350 | { | |
3351 | /* This is a variable sized object. */ | |
3352 | /* ??? Can we use expr_size here? */ | |
3353 | rtx size_rtx = expand_expr (size_in_bytes (type), NULL_RTX, | |
3354 | TYPE_MODE (sizetype), 0); | |
3355 | ||
3356 | copy = gen_rtx (MEM, BLKmode, | |
3357 | allocate_dynamic_stack_space (size_rtx, NULL_RTX, | |
3358 | TYPE_ALIGN (type))); | |
3359 | } | |
3360 | else | |
3361 | { | |
3362 | int size = int_size_in_bytes (type); | |
3363 | copy = assign_stack_temp (TYPE_MODE (type), size, 1); | |
3364 | } | |
3365 | ||
3366 | store_expr (parm, copy, 0); | |
3367 | emit_move_insn (parmreg, XEXP (copy, 0)); | |
3368 | conversion_insns = get_insns (); | |
3369 | end_sequence (); | |
3370 | } | |
3371 | #endif /* FUNCTION_ARG_CALLEE_COPIES */ | |
3372 | ||
3373 | /* In any case, record the parm's desired stack location | |
3374 | in case we later discover it must live in the stack. */ | |
3375 | if (REGNO (parmreg) >= nparmregs) | |
3376 | { | |
3377 | rtx *new; | |
3378 | int old_nparmregs = nparmregs; | |
3379 | nparmregs = REGNO (parmreg) + 5; | |
3380 | new = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
3381 | bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx)); | |
3382 | bzero (new + old_nparmregs, (nparmregs - old_nparmregs) * sizeof (rtx)); | |
3383 | parm_reg_stack_loc = new; | |
3384 | } | |
3385 | parm_reg_stack_loc[REGNO (parmreg)] = stack_parm; | |
3386 | ||
3387 | /* Mark the register as eliminable if we did no conversion | |
3388 | and it was copied from memory at a fixed offset, | |
3389 | and the arg pointer was not copied to a pseudo-reg. | |
3390 | If the arg pointer is a pseudo reg or the offset formed | |
3391 | an invalid address, such memory-equivalences | |
3392 | as we make here would screw up life analysis for it. */ | |
3393 | if (nominal_mode == passed_mode | |
3394 | && GET_CODE (entry_parm) == MEM | |
3395 | && entry_parm == stack_parm | |
3396 | && stack_offset.var == 0 | |
3397 | && reg_mentioned_p (virtual_incoming_args_rtx, | |
3398 | XEXP (entry_parm, 0))) | |
3399 | REG_NOTES (get_last_insn ()) | |
3400 | = gen_rtx (EXPR_LIST, REG_EQUIV, | |
3401 | entry_parm, REG_NOTES (get_last_insn ())); | |
3402 | ||
3403 | /* For a pointer data type, suggest a pointer register. */ | |
3404 | if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE) | |
3405 | mark_reg_pointer (parmreg); | |
3406 | } | |
3407 | else | |
3408 | { | |
3409 | /* Value must be stored in the stack slot STACK_PARM | |
3410 | during function execution. */ | |
3411 | ||
3412 | if (passed_mode != nominal_mode) | |
3413 | { | |
3414 | /* Conversion is required. */ | |
3415 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
3416 | ||
3417 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
3418 | ||
3419 | push_to_sequence (conversion_insns); | |
3420 | entry_parm = convert_to_mode (nominal_mode, tempreg, | |
3421 | TREE_UNSIGNED (TREE_TYPE (parm))); | |
3422 | conversion_insns = get_insns (); | |
3423 | end_sequence (); | |
3424 | } | |
3425 | ||
3426 | if (entry_parm != stack_parm) | |
3427 | { | |
3428 | if (stack_parm == 0) | |
3429 | { | |
3430 | stack_parm | |
3431 | = assign_stack_local (GET_MODE (entry_parm), | |
3432 | GET_MODE_SIZE (GET_MODE (entry_parm)), 0); | |
3433 | /* If this is a memory ref that contains aggregate components, | |
3434 | mark it as such for cse and loop optimize. */ | |
3435 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3436 | } | |
3437 | ||
3438 | if (passed_mode != nominal_mode) | |
3439 | { | |
3440 | push_to_sequence (conversion_insns); | |
3441 | emit_move_insn (validize_mem (stack_parm), | |
3442 | validize_mem (entry_parm)); | |
3443 | conversion_insns = get_insns (); | |
3444 | end_sequence (); | |
3445 | } | |
3446 | else | |
3447 | emit_move_insn (validize_mem (stack_parm), | |
3448 | validize_mem (entry_parm)); | |
3449 | } | |
3450 | ||
3451 | DECL_RTL (parm) = stack_parm; | |
3452 | } | |
3453 | ||
3454 | /* If this "parameter" was the place where we are receiving the | |
3455 | function's incoming structure pointer, set up the result. */ | |
3456 | if (parm == function_result_decl) | |
3457 | DECL_RTL (DECL_RESULT (fndecl)) | |
3458 | = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm)); | |
3459 | ||
3460 | if (TREE_THIS_VOLATILE (parm)) | |
3461 | MEM_VOLATILE_P (DECL_RTL (parm)) = 1; | |
3462 | if (TREE_READONLY (parm)) | |
3463 | RTX_UNCHANGING_P (DECL_RTL (parm)) = 1; | |
3464 | } | |
3465 | ||
3466 | /* Output all parameter conversion instructions (possibly including calls) | |
3467 | now that all parameters have been copied out of hard registers. */ | |
3468 | emit_insns (conversion_insns); | |
3469 | ||
3470 | max_parm_reg = max_reg_num (); | |
3471 | last_parm_insn = get_last_insn (); | |
3472 | ||
3473 | current_function_args_size = stack_args_size.constant; | |
3474 | ||
3475 | /* Adjust function incoming argument size for alignment and | |
3476 | minimum length. */ | |
3477 | ||
3478 | #ifdef REG_PARM_STACK_SPACE | |
3479 | #ifndef MAYBE_REG_PARM_STACK_SPACE | |
3480 | current_function_args_size = MAX (current_function_args_size, | |
3481 | REG_PARM_STACK_SPACE (fndecl)); | |
3482 | #endif | |
3483 | #endif | |
3484 | ||
3485 | #ifdef STACK_BOUNDARY | |
3486 | #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT) | |
3487 | ||
3488 | current_function_args_size | |
3489 | = ((current_function_args_size + STACK_BYTES - 1) | |
3490 | / STACK_BYTES) * STACK_BYTES; | |
3491 | #endif | |
3492 | ||
3493 | #ifdef ARGS_GROW_DOWNWARD | |
3494 | current_function_arg_offset_rtx | |
3495 | = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant) | |
3496 | : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var, | |
3497 | size_int (-stack_args_size.constant)), | |
3498 | NULL_RTX, VOIDmode, 0)); | |
3499 | #else | |
3500 | current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size); | |
3501 | #endif | |
3502 | ||
3503 | /* See how many bytes, if any, of its args a function should try to pop | |
3504 | on return. */ | |
3505 | ||
3506 | current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl), | |
3507 | current_function_args_size); | |
3508 | ||
3509 | /* For a stdarg.h function, save info about regs and stack space | |
3510 | used by the named args. */ | |
3511 | ||
3512 | if (stdarg) | |
3513 | current_function_args_info = args_so_far; | |
3514 | ||
3515 | /* Set the rtx used for the function return value. Put this in its | |
3516 | own variable so any optimizers that need this information don't have | |
3517 | to include tree.h. Do this here so it gets done when an inlined | |
3518 | function gets output. */ | |
3519 | ||
3520 | current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl)); | |
3521 | } | |
3522 | \f | |
3523 | /* Indicate whether REGNO is an incoming argument to the current function | |
3524 | that was promoted to a wider mode. If so, return the RTX for the | |
3525 | register (to get its mode). PMODE and PUNSIGNEDP are set to the mode | |
3526 | that REGNO is promoted from and whether the promotion was signed or | |
3527 | unsigned. */ | |
3528 | ||
3529 | #ifdef PROMOTE_FUNCTION_ARGS | |
3530 | ||
3531 | rtx | |
3532 | promoted_input_arg (regno, pmode, punsignedp) | |
3533 | int regno; | |
3534 | enum machine_mode *pmode; | |
3535 | int *punsignedp; | |
3536 | { | |
3537 | tree arg; | |
3538 | ||
3539 | for (arg = DECL_ARGUMENTS (current_function_decl); arg; | |
3540 | arg = TREE_CHAIN (arg)) | |
3541 | if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG | |
3542 | && REGNO (DECL_INCOMING_RTL (arg)) == regno | |
3543 | && (TREE_CODE (TREE_TYPE (arg)) == INTEGER_TYPE | |
3544 | || TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE | |
3545 | || TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE | |
3546 | || TREE_CODE (TREE_TYPE (arg)) == CHAR_TYPE | |
3547 | || TREE_CODE (TREE_TYPE (arg)) == REAL_TYPE | |
3548 | || TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE | |
3549 | || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE)) | |
3550 | { | |
3551 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg)); | |
3552 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg)); | |
3553 | ||
3554 | PROMOTE_MODE (mode, unsignedp, TREE_TYPE (arg)); | |
3555 | if (mode == GET_MODE (DECL_INCOMING_RTL (arg)) | |
3556 | && mode != DECL_MODE (arg)) | |
3557 | { | |
3558 | *pmode = DECL_MODE (arg); | |
3559 | *punsignedp = unsignedp; | |
3560 | return DECL_INCOMING_RTL (arg); | |
3561 | } | |
3562 | } | |
3563 | ||
3564 | return 0; | |
3565 | } | |
3566 | ||
3567 | #endif | |
3568 | \f | |
3569 | /* Compute the size and offset from the start of the stacked arguments for a | |
3570 | parm passed in mode PASSED_MODE and with type TYPE. | |
3571 | ||
3572 | INITIAL_OFFSET_PTR points to the current offset into the stacked | |
3573 | arguments. | |
3574 | ||
3575 | The starting offset and size for this parm are returned in *OFFSET_PTR | |
3576 | and *ARG_SIZE_PTR, respectively. | |
3577 | ||
3578 | IN_REGS is non-zero if the argument will be passed in registers. It will | |
3579 | never be set if REG_PARM_STACK_SPACE is not defined. | |
3580 | ||
3581 | FNDECL is the function in which the argument was defined. | |
3582 | ||
3583 | There are two types of rounding that are done. The first, controlled by | |
3584 | FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument | |
3585 | list to be aligned to the specific boundary (in bits). This rounding | |
3586 | affects the initial and starting offsets, but not the argument size. | |
3587 | ||
3588 | The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY, | |
3589 | optionally rounds the size of the parm to PARM_BOUNDARY. The | |
3590 | initial offset is not affected by this rounding, while the size always | |
3591 | is and the starting offset may be. */ | |
3592 | ||
3593 | /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case; | |
3594 | initial_offset_ptr is positive because locate_and_pad_parm's | |
3595 | callers pass in the total size of args so far as | |
3596 | initial_offset_ptr. arg_size_ptr is always positive. */ | |
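/* A small worked example of the two roundings (the numbers are assumptions,
   not requirements of any particular target): with PARM_BOUNDARY == 32,
   FUNCTION_ARG_BOUNDARY returning 32 and padding `upward', a 1-byte char
   parm at initial offset 6 first has its offset aligned up to 8, and its
   size is rounded from 1 up to 4, so the following parm starts at 12.  */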
3597 | ||
3598 | static void pad_to_arg_alignment (), pad_below (); | |
3599 | ||
3600 | void | |
3601 | locate_and_pad_parm (passed_mode, type, in_regs, fndecl, | |
3602 | initial_offset_ptr, offset_ptr, arg_size_ptr) | |
3603 | enum machine_mode passed_mode; | |
3604 | tree type; | |
3605 | int in_regs; | |
3606 | tree fndecl; | |
3607 | struct args_size *initial_offset_ptr; | |
3608 | struct args_size *offset_ptr; | |
3609 | struct args_size *arg_size_ptr; | |
3610 | { | |
3611 | tree sizetree | |
3612 | = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); | |
3613 | enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type); | |
3614 | int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type); | |
3615 | int boundary_in_bytes = boundary / BITS_PER_UNIT; | |
3616 | int reg_parm_stack_space = 0; | |
3617 | ||
3618 | #ifdef REG_PARM_STACK_SPACE | |
3619 | /* If we have found a stack parm before we reach the end of the | |
3620 | area reserved for registers, skip that area. */ | |
3621 | if (! in_regs) | |
3622 | { | |
3623 | #ifdef MAYBE_REG_PARM_STACK_SPACE | |
3624 | reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; | |
3625 | #else | |
3626 | reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); | |
3627 | #endif | |
3628 | if (reg_parm_stack_space > 0) | |
3629 | { | |
3630 | if (initial_offset_ptr->var) | |
3631 | { | |
3632 | initial_offset_ptr->var | |
3633 | = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | |
3634 | size_int (reg_parm_stack_space)); | |
3635 | initial_offset_ptr->constant = 0; | |
3636 | } | |
3637 | else if (initial_offset_ptr->constant < reg_parm_stack_space) | |
3638 | initial_offset_ptr->constant = reg_parm_stack_space; | |
3639 | } | |
3640 | } | |
3641 | #endif /* REG_PARM_STACK_SPACE */ | |
3642 | ||
3643 | arg_size_ptr->var = 0; | |
3644 | arg_size_ptr->constant = 0; | |
3645 | ||
3646 | #ifdef ARGS_GROW_DOWNWARD | |
3647 | if (initial_offset_ptr->var) | |
3648 | { | |
3649 | offset_ptr->constant = 0; | |
3650 | offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node, | |
3651 | initial_offset_ptr->var); | |
3652 | } | |
3653 | else | |
3654 | { | |
3655 | offset_ptr->constant = - initial_offset_ptr->constant; | |
3656 | offset_ptr->var = 0; | |
3657 | } | |
3658 | if (where_pad == upward | |
3659 | && (TREE_CODE (sizetree) != INTEGER_CST | |
3660 | || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY))) | |
3661 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3662 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
3663 | if (where_pad != downward) | |
3664 | pad_to_arg_alignment (offset_ptr, boundary); | |
3665 | if (initial_offset_ptr->var) | |
3666 | { | |
3667 | arg_size_ptr->var = size_binop (MINUS_EXPR, | |
3668 | size_binop (MINUS_EXPR, | |
3669 | integer_zero_node, | |
3670 | initial_offset_ptr->var), | |
3671 | offset_ptr->var); | |
3672 | } | |
3673 | else | |
3674 | { | |
3675 | arg_size_ptr->constant = (- initial_offset_ptr->constant - | |
3676 | offset_ptr->constant); | |
3677 | } | |
3678 | /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */ | |
3679 | if (where_pad == downward) | |
3680 | pad_below (arg_size_ptr, passed_mode, sizetree); | |
3681 | #else /* !ARGS_GROW_DOWNWARD */ | |
3682 | pad_to_arg_alignment (initial_offset_ptr, boundary); | |
3683 | *offset_ptr = *initial_offset_ptr; | |
3684 | ||
3685 | #ifdef PUSH_ROUNDING | |
3686 | if (passed_mode != BLKmode) | |
3687 | sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); | |
3688 | #endif | |
3689 | ||
3690 | if (where_pad != none | |
3691 | && (TREE_CODE (sizetree) != INTEGER_CST | |
3692 | || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY))) | |
3693 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3694 | ||
3695 | /* This must be done after rounding sizetree, so that it will subtract | |
3696 | the same value that we explicitly add below. */ | |
3697 | if (where_pad == downward) | |
3698 | pad_below (offset_ptr, passed_mode, sizetree); | |
3699 | ADD_PARM_SIZE (*arg_size_ptr, sizetree); | |
3700 | #endif /* ARGS_GROW_DOWNWARD */ | |
3701 | } | |
3702 | ||
3703 | /* Round the stack offset in *OFFSET_PTR up (down, when ARGS_GROW_DOWNWARD) | |
3704 | to a multiple of BOUNDARY.  BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ | |
3705 | ||
3706 | static void | |
3707 | pad_to_arg_alignment (offset_ptr, boundary) | |
3708 | struct args_size *offset_ptr; | |
3709 | int boundary; | |
3710 | { | |
3711 | int boundary_in_bytes = boundary / BITS_PER_UNIT; | |
3712 | ||
3713 | if (boundary > BITS_PER_UNIT) | |
3714 | { | |
3715 | if (offset_ptr->var) | |
3716 | { | |
3717 | offset_ptr->var = | |
3718 | #ifdef ARGS_GROW_DOWNWARD | |
3719 | round_down | |
3720 | #else | |
3721 | round_up | |
3722 | #endif | |
3723 | (ARGS_SIZE_TREE (*offset_ptr), | |
3724 | boundary / BITS_PER_UNIT); | |
3725 | offset_ptr->constant = 0; /*?*/ | |
3726 | } | |
3727 | else | |
3728 | offset_ptr->constant = | |
3729 | #ifdef ARGS_GROW_DOWNWARD | |
3730 | FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes); | |
3731 | #else | |
3732 | CEIL_ROUND (offset_ptr->constant, boundary_in_bytes); | |
3733 | #endif | |
3734 | } | |
3735 | } | |
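/* Worked examples of the macros used above (assuming two's-complement
   arithmetic): CEIL_ROUND (9, 8) == (9 + 7) & ~7 == 16, while
   FLOOR_ROUND (-9, 8) == -9 & ~7 == -16.  That is why the
   ARGS_GROW_DOWNWARD case, whose offsets are negative, uses the FLOOR
   variant: it moves the offset further from zero.  */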
3736 | ||
3737 | static void | |
3738 | pad_below (offset_ptr, passed_mode, sizetree) | |
3739 | struct args_size *offset_ptr; | |
3740 | enum machine_mode passed_mode; | |
3741 | tree sizetree; | |
3742 | { | |
3743 | if (passed_mode != BLKmode) | |
3744 | { | |
3745 | if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY) | |
3746 | offset_ptr->constant | |
3747 | += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1) | |
3748 | / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT) | |
3749 | - GET_MODE_SIZE (passed_mode)); | |
3750 | } | |
3751 | else | |
3752 | { | |
3753 | if (TREE_CODE (sizetree) != INTEGER_CST | |
3754 | || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY) | |
3755 | { | |
3756 | /* Round the size up to a multiple of PARM_BOUNDARY bits. */ | |
3757 | tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3758 | /* Add it in. */ | |
3759 | ADD_PARM_SIZE (*offset_ptr, s2); | |
3760 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
3761 | } | |
3762 | } | |
3763 | } | |
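/* Example of the non-BLKmode case above, with an assumed PARM_BOUNDARY of
   32: an HImode parm (16 bits, 2 bytes) gets
   (16 + 31) / 32 * 32 / 8 - 2 == 2 bytes added to its offset, so the value
   occupies the last 2 bytes of its 4-byte slot.  */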
3764 | ||
3765 | static tree | |
3766 | round_down (value, divisor) | |
3767 | tree value; | |
3768 | int divisor; | |
3769 | { | |
3770 | return size_binop (MULT_EXPR, | |
3771 | size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)), | |
3772 | size_int (divisor)); | |
3773 | } | |
3774 | \f | |
3775 | /* Walk the tree of blocks describing the binding levels within a function | |
3776 | and warn about uninitialized variables. | |
3777 | This is done after calling flow_analysis and before global_alloc | |
3778 | clobbers the pseudo-regs to hard regs. */ | |
3779 | ||
3780 | void | |
3781 | uninitialized_vars_warning (block) | |
3782 | tree block; | |
3783 | { | |
3784 | register tree decl, sub; | |
3785 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
3786 | { | |
3787 | if (TREE_CODE (decl) == VAR_DECL | |
3788 | /* These warnings are unreliable for aggregates | |
3789 | because assigning the fields one by one can fail to convince | |
3790 | flow.c that the entire aggregate was initialized. | |
3791 | Unions are troublesome because members may be shorter. */ | |
3792 | && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE | |
3793 | && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE | |
3794 | && TREE_CODE (TREE_TYPE (decl)) != QUAL_UNION_TYPE | |
3795 | && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE | |
3796 | && DECL_RTL (decl) != 0 | |
3797 | && GET_CODE (DECL_RTL (decl)) == REG | |
3798 | && regno_uninitialized (REGNO (DECL_RTL (decl)))) | |
3799 | warning_with_decl (decl, | |
3800 | "`%s' may be used uninitialized in this function"); | |
3801 | if (TREE_CODE (decl) == VAR_DECL | |
3802 | && DECL_RTL (decl) != 0 | |
3803 | && GET_CODE (DECL_RTL (decl)) == REG | |
3804 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
3805 | warning_with_decl (decl, | |
3806 | "variable `%s' may be clobbered by `longjmp' or `vfork'"); | |
3807 | } | |
3808 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
3809 | uninitialized_vars_warning (sub); | |
3810 | } | |
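/* A hypothetical fragment that could provoke the first warning above:

	int f () { int x; if (maybe ()) x = 1; return x; }

   Whether the warning actually fires depends on what flow analysis concludes
   about the pseudo holding `x', so this is only a sketch.  */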
3811 | ||
3812 | /* Do the appropriate part of uninitialized_vars_warning | |
3813 | but for arguments instead of local variables. */ | |
3814 | ||
3815 | void | |
3816 | setjmp_args_warning (block) | |
3817 | tree block; | |
3818 | { | |
3819 | register tree decl; | |
3820 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
3821 | decl; decl = TREE_CHAIN (decl)) | |
3822 | if (DECL_RTL (decl) != 0 | |
3823 | && GET_CODE (DECL_RTL (decl)) == REG | |
3824 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
3825 | warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp' or `vfork'"); | |
3826 | } | |
3827 | ||
3828 | /* If this function calls setjmp, put all vars into the stack | |
3829 | unless they were declared `register'. */ | |
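/* Sketch of why this matters (names hypothetical):

	jmp_buf env;
	int f () { int n = 0; if (setjmp (env)) return n; n = 1; g (); ... }

   Here `n' lives across the setjmp, so unless it was declared `register' it
   is forced into a stack slot below; a copy kept in a hard register could be
   clobbered by the time g () longjmps back.  */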
3830 | ||
3831 | void | |
3832 | setjmp_protect (block) | |
3833 | tree block; | |
3834 | { | |
3835 | register tree decl, sub; | |
3836 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
3837 | if ((TREE_CODE (decl) == VAR_DECL | |
3838 | || TREE_CODE (decl) == PARM_DECL) | |
3839 | && DECL_RTL (decl) != 0 | |
3840 | && GET_CODE (DECL_RTL (decl)) == REG | |
3841 | /* If this variable came from an inline function, it must be | |
3842 | that its life doesn't overlap the setjmp. If there was a | |
3843 | setjmp in the function, it would already be in memory. We | |
3844 | must exclude such variables because their DECL_RTL might be | |
3845 | set to strange things such as virtual_stack_vars_rtx. */ | |
3846 | && ! DECL_FROM_INLINE (decl) | |
3847 | && ( | |
3848 | #ifdef NON_SAVING_SETJMP | |
3849 | /* If longjmp doesn't restore the registers, | |
3850 | don't put anything in them. */ | |
3851 | NON_SAVING_SETJMP | |
3852 | || | |
3853 | #endif | |
3854 | ! DECL_REGISTER (decl))) | |
3855 | put_var_into_stack (decl); | |
3856 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
3857 | setjmp_protect (sub); | |
3858 | } | |
3859 | \f | |
3860 | /* Like the previous function, but for args instead of local variables. */ | |
3861 | ||
3862 | void | |
3863 | setjmp_protect_args () | |
3864 | { | |
3865 | register tree decl, sub; | |
3866 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
3867 | decl; decl = TREE_CHAIN (decl)) | |
3868 | if ((TREE_CODE (decl) == VAR_DECL | |
3869 | || TREE_CODE (decl) == PARM_DECL) | |
3870 | && DECL_RTL (decl) != 0 | |
3871 | && GET_CODE (DECL_RTL (decl)) == REG | |
3872 | && ( | |
3873 | /* If longjmp doesn't restore the registers, | |
3874 | don't put anything in them. */ | |
3875 | #ifdef NON_SAVING_SETJMP | |
3876 | NON_SAVING_SETJMP | |
3877 | || | |
3878 | #endif | |
3879 | ! DECL_REGISTER (decl))) | |
3880 | put_var_into_stack (decl); | |
3881 | } | |
3882 | \f | |
3883 | /* Return the context-pointer register corresponding to DECL, | |
3884 | or 0 if it does not need one. */ | |
3885 | ||
3886 | rtx | |
3887 | lookup_static_chain (decl) | |
3888 | tree decl; | |
3889 | { | |
3890 | tree context = decl_function_context (decl); | |
3891 | tree link; | |
3892 | ||
3893 | if (context == 0) | |
3894 | return 0; | |
3895 | ||
3896 | /* We treat inline_function_decl as an alias for the current function | |
3897 | because that is the inline function whose vars, types, etc. | |
3898 | are being merged into the current function. | |
3899 | See expand_inline_function. */ | |
3900 | if (context == current_function_decl || context == inline_function_decl) | |
3901 | return virtual_stack_vars_rtx; | |
3902 | ||
3903 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
3904 | if (TREE_PURPOSE (link) == context) | |
3905 | return RTL_EXPR_RTL (TREE_VALUE (link)); | |
3906 | ||
3907 | abort (); | |
3908 | } | |
3909 | \f | |
3910 | /* Convert a stack slot address ADDR for variable VAR | |
3911 | (from a containing function) | |
3912 | into an address valid in this function (using a static chain). */ | |
3913 | ||
3914 | rtx | |
3915 | fix_lexical_addr (addr, var) | |
3916 | rtx addr; | |
3917 | tree var; | |
3918 | { | |
3919 | rtx basereg; | |
3920 | int displacement; | |
3921 | tree context = decl_function_context (var); | |
3922 | struct function *fp; | |
3923 | rtx base = 0; | |
3924 | ||
3925 | /* If this is the present function, we need not do anything. */ | |
3926 | if (context == current_function_decl || context == inline_function_decl) | |
3927 | return addr; | |
3928 | ||
3929 | for (fp = outer_function_chain; fp; fp = fp->next) | |
3930 | if (fp->decl == context) | |
3931 | break; | |
3932 | ||
3933 | if (fp == 0) | |
3934 | abort (); | |
3935 | ||
3936 | /* Decode given address as base reg plus displacement. */ | |
3937 | if (GET_CODE (addr) == REG) | |
3938 | basereg = addr, displacement = 0; | |
3939 | else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
3940 | basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1)); | |
3941 | else | |
3942 | abort (); | |
3943 | ||
3944 | /* We accept vars reached via the containing function's | |
3945 | incoming arg pointer and via its stack variables pointer. */ | |
3946 | if (basereg == fp->internal_arg_pointer) | |
3947 | { | |
3948 | /* If reached via arg pointer, get the arg pointer value | |
3949 | out of that function's stack frame. | |
3950 | ||
3951 | There are two cases: If a separate ap is needed, allocate a | |
3952 | slot in the outer function for it and dereference it that way. | |
3953 | This is correct even if the real ap is actually a pseudo. | |
3954 | Otherwise, just adjust the offset from the frame pointer to | |
3955 | compensate. */ | |
3956 | ||
3957 | #ifdef NEED_SEPARATE_AP | |
3958 | rtx addr; | |
3959 | ||
3960 | if (fp->arg_pointer_save_area == 0) | |
3961 | fp->arg_pointer_save_area | |
3962 | = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp); | |
3963 | ||
3964 | addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var); | |
3965 | addr = memory_address (Pmode, addr); | |
3966 | ||
3967 | base = copy_to_reg (gen_rtx (MEM, Pmode, addr)); | |
3968 | #else | |
3969 | displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET); | |
3970 | base = lookup_static_chain (var); | |
3971 | #endif | |
3972 | } | |
3973 | ||
3974 | else if (basereg == virtual_stack_vars_rtx) | |
3975 | { | |
3976 | /* This is the same code as lookup_static_chain, duplicated here to | |
3977 | avoid an extra call to decl_function_context. */ | |
3978 | tree link; | |
3979 | ||
3980 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
3981 | if (TREE_PURPOSE (link) == context) | |
3982 | { | |
3983 | base = RTL_EXPR_RTL (TREE_VALUE (link)); | |
3984 | break; | |
3985 | } | |
3986 | } | |
3987 | ||
3988 | if (base == 0) | |
3989 | abort (); | |
3990 | ||
3991 | /* Use same offset, relative to appropriate static chain or argument | |
3992 | pointer. */ | |
3993 | return plus_constant (base, displacement); | |
3994 | } | |
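/* Hedged example of when this is needed (GNU C nested functions):

	int outer (int n) { int v = n; int inner () { return v; } ... }

   When `inner' refers to `v', the stack-slot address recorded for `v' is
   relative to OUTER's frame, so it is rebased here through the static chain
   (or the saved arg pointer) before INNER can use it.  */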
3995 | \f | |
3996 | /* Return the address of the trampoline for entering nested fn FUNCTION. | |
3997 | If necessary, allocate a trampoline (in the stack frame) | |
3998 | and emit rtl to initialize its contents (at entry to this function). */ | |
3999 | ||
4000 | rtx | |
4001 | trampoline_address (function) | |
4002 | tree function; | |
4003 | { | |
4004 | tree link; | |
4005 | tree rtlexp; | |
4006 | rtx tramp; | |
4007 | struct function *fp; | |
4008 | tree fn_context; | |
4009 | ||
4010 | /* Find an existing trampoline and return it. */ | |
4011 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
4012 | if (TREE_PURPOSE (link) == function) | |
4013 | return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0); | |
4014 | for (fp = outer_function_chain; fp; fp = fp->next) | |
4015 | for (link = fp->trampoline_list; link; link = TREE_CHAIN (link)) | |
4016 | if (TREE_PURPOSE (link) == function) | |
4017 | { | |
4018 | tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0), | |
4019 | function); | |
4020 | return round_trampoline_addr (tramp); | |
4021 | } | |
4022 | ||
4023 | /* None exists; we must make one. */ | |
4024 | ||
4025 | /* Find the `struct function' for the function containing FUNCTION. */ | |
4026 | fp = 0; | |
4027 | fn_context = decl_function_context (function); | |
4028 | if (fn_context != current_function_decl) | |
4029 | for (fp = outer_function_chain; fp; fp = fp->next) | |
4030 | if (fp->decl == fn_context) | |
4031 | break; | |
4032 | ||
4033 | /* Allocate run-time space for this trampoline | |
4034 | (usually in the defining function's stack frame). */ | |
4035 | #ifdef ALLOCATE_TRAMPOLINE | |
4036 | tramp = ALLOCATE_TRAMPOLINE (fp); | |
4037 | #else | |
4038 | /* If rounding is needed, allocate extra space | |
4039 | to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */ | |
4040 | #ifdef TRAMPOLINE_ALIGNMENT | |
4041 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1) | |
4042 | #else | |
4043 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE) | |
4044 | #endif | |
4045 | if (fp != 0) | |
4046 | tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp); | |
4047 | else | |
4048 | tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0); | |
4049 | #endif | |
4050 | ||
4051 | /* Record the trampoline for reuse and note it for later initialization | |
4052 | by expand_function_end. */ | |
4053 | if (fp != 0) | |
4054 | { | |
4055 | push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack); | |
4056 | rtlexp = make_node (RTL_EXPR); | |
4057 | RTL_EXPR_RTL (rtlexp) = tramp; | |
4058 | fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list); | |
4059 | pop_obstacks (); | |
4060 | } | |
4061 | else | |
4062 | { | |
4063 | /* Make the RTL_EXPR node temporary, not momentary, so that the | |
4064 | trampoline_list doesn't become garbage. */ | |
4065 | int momentary = suspend_momentary (); | |
4066 | rtlexp = make_node (RTL_EXPR); | |
4067 | resume_momentary (momentary); | |
4068 | ||
4069 | RTL_EXPR_RTL (rtlexp) = tramp; | |
4070 | trampoline_list = tree_cons (function, rtlexp, trampoline_list); | |
4071 | } | |
4072 | ||
4073 | tramp = fix_lexical_addr (XEXP (tramp, 0), function); | |
4074 | return round_trampoline_addr (tramp); | |
4075 | } | |
4076 | ||
4077 | /* Given a trampoline address, | |
4078 | round it to a multiple of TRAMPOLINE_ALIGNMENT. */ | |
4079 | ||
4080 | static rtx | |
4081 | round_trampoline_addr (tramp) | |
4082 | rtx tramp; | |
4083 | { | |
4084 | #ifdef TRAMPOLINE_ALIGNMENT | |
4085 | /* Round address up to desired boundary. */ | |
4086 | rtx temp = gen_reg_rtx (Pmode); | |
4087 | temp = expand_binop (Pmode, add_optab, tramp, | |
4088 | GEN_INT (TRAMPOLINE_ALIGNMENT - 1), | |
4089 | temp, 0, OPTAB_LIB_WIDEN); | |
4090 | tramp = expand_binop (Pmode, and_optab, temp, | |
4091 | GEN_INT (- TRAMPOLINE_ALIGNMENT), | |
4092 | temp, 0, OPTAB_LIB_WIDEN); | |
4093 | #endif | |
4094 | return tramp; | |
4095 | } | |
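/* For instance, assuming TRAMPOLINE_ALIGNMENT == 16, an address of 0x1009
   becomes (0x1009 + 15) & -16 == 0x1010; targets that do not define
   TRAMPOLINE_ALIGNMENT get the address back unchanged.  */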
4096 | \f | |
4097 | /* The functions identify_blocks and reorder_blocks provide a way to | |
4098 | reorder the tree of BLOCK nodes, for optimizers that reshuffle or | |
4099 | duplicate portions of the RTL code. Call identify_blocks before | |
4100 | changing the RTL, and call reorder_blocks after. */ | |
4101 | ||
4102 | static int all_blocks (); | |
4103 | static tree blocks_nreverse (); | |
4104 | ||
4105 | /* Put all this function's BLOCK nodes into a vector, and return it. | |
4106 | Also store in each NOTE for the beginning or end of a block | |
4107 | the index of that block in the vector. | |
4108 | The arguments are TOP_BLOCK, the top-level block of the function, | |
4109 | and INSNS, the insn chain of the function. */ | |
4110 | ||
4111 | tree * | |
4112 | identify_blocks (top_block, insns) | |
4113 | tree top_block; | |
4114 | rtx insns; | |
4115 | { | |
4116 | int n_blocks; | |
4117 | tree *block_vector; | |
4118 | int *block_stack; | |
4119 | int depth = 0; | |
4120 | int next_block_number = 0; | |
4121 | int current_block_number = 0; | |
4122 | rtx insn; | |
4123 | ||
4124 | if (top_block == 0) | |
4125 | return 0; | |
4126 | ||
4127 | n_blocks = all_blocks (top_block, 0); | |
4128 | block_vector = (tree *) xmalloc (n_blocks * sizeof (tree)); | |
4129 | block_stack = (int *) alloca (n_blocks * sizeof (int)); | |
4130 | ||
4131 | all_blocks (top_block, block_vector); | |
4132 | ||
4133 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
4134 | if (GET_CODE (insn) == NOTE) | |
4135 | { | |
4136 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
4137 | { | |
4138 | block_stack[depth++] = current_block_number; | |
4139 | current_block_number = next_block_number; | |
4140 | NOTE_BLOCK_NUMBER (insn) = next_block_number++; | |
4141 | } | |
4142 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
4143 | { | |
4144 | current_block_number = block_stack[--depth]; | |
4145 | NOTE_BLOCK_NUMBER (insn) = current_block_number; | |
4146 | } | |
4147 | } | |
4148 | ||
4149 | return block_vector; | |
4150 | } | |
4151 | ||
4152 | /* Given BLOCK_VECTOR which was returned by identify_blocks, | |
4153 | and a revised instruction chain, rebuild the tree structure | |
4154 | of BLOCK nodes to correspond to the new order of RTL. | |
4155 | The new block tree is inserted below TOP_BLOCK. | |
4156 | Returns the current top-level block. */ | |
4157 | ||
4158 | tree | |
4159 | reorder_blocks (block_vector, top_block, insns) | |
4160 | tree *block_vector; | |
4161 | tree top_block; | |
4162 | rtx insns; | |
4163 | { | |
4164 | tree current_block = top_block; | |
4165 | rtx insn; | |
4166 | ||
4167 | if (block_vector == 0) | |
4168 | return top_block; | |
4169 | ||
4170 | /* Prune the old tree away, so that it doesn't get in the way. */ | |
4171 | BLOCK_SUBBLOCKS (current_block) = 0; | |
4172 | ||
4173 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
4174 | if (GET_CODE (insn) == NOTE) | |
4175 | { | |
4176 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
4177 | { | |
4178 | tree block = block_vector[NOTE_BLOCK_NUMBER (insn)]; | |
4179 | /* If we have seen this block before, copy it. */ | |
4180 | if (TREE_ASM_WRITTEN (block)) | |
4181 | block = copy_node (block); | |
4182 | BLOCK_SUBBLOCKS (block) = 0; | |
4183 | TREE_ASM_WRITTEN (block) = 1; | |
4184 | BLOCK_SUPERCONTEXT (block) = current_block; | |
4185 | BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); | |
4186 | BLOCK_SUBBLOCKS (current_block) = block; | |
4187 | current_block = block; | |
4188 | NOTE_SOURCE_FILE (insn) = 0; | |
4189 | } | |
4190 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
4191 | { | |
4192 | BLOCK_SUBBLOCKS (current_block) | |
4193 | = blocks_nreverse (BLOCK_SUBBLOCKS (current_block)); | |
4194 | current_block = BLOCK_SUPERCONTEXT (current_block); | |
4195 | NOTE_SOURCE_FILE (insn) = 0; | |
4196 | } | |
4197 | } | |
4198 | ||
4199 | return current_block; | |
4200 | } | |
4201 | ||
4202 | /* Reverse the order of elements in the chain T of blocks, | |
4203 | and return the new head of the chain (old last element). */ | |
4204 | ||
4205 | static tree | |
4206 | blocks_nreverse (t) | |
4207 | tree t; | |
4208 | { | |
4209 | register tree prev = 0, decl, next; | |
4210 | for (decl = t; decl; decl = next) | |
4211 | { | |
4212 | next = BLOCK_CHAIN (decl); | |
4213 | BLOCK_CHAIN (decl) = prev; | |
4214 | prev = decl; | |
4215 | } | |
4216 | return prev; | |
4217 | } | |
4218 | ||
4219 | /* Count the subblocks of BLOCK, and list them all into the vector VECTOR. | |
4220 | Also clear TREE_ASM_WRITTEN in all blocks. */ | |
4221 | ||
4222 | static int | |
4223 | all_blocks (block, vector) | |
4224 | tree block; | |
4225 | tree *vector; | |
4226 | { | |
4227 | int n_blocks = 1; | |
4228 | tree subblocks; | |
4229 | ||
4230 | TREE_ASM_WRITTEN (block) = 0; | |
4231 | /* Record this block. */ | |
4232 | if (vector) | |
4233 | vector[0] = block; | |
4234 | ||
4235 | /* Record the subblocks, and their subblocks. */ | |
4236 | for (subblocks = BLOCK_SUBBLOCKS (block); | |
4237 | subblocks; subblocks = BLOCK_CHAIN (subblocks)) | |
4238 | n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0); | |
4239 | ||
4240 | return n_blocks; | |
4241 | } | |
4242 | \f | |
4243 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) | |
4244 | and initialize static variables for generating RTL for the statements | |
4245 | of the function. */ | |
4246 | ||
4247 | void | |
4248 | init_function_start (subr, filename, line) | |
4249 | tree subr; | |
4250 | char *filename; | |
4251 | int line; | |
4252 | { | |
4253 | char *junk; | |
4254 | ||
4255 | init_stmt_for_function (); | |
4256 | ||
4257 | cse_not_expected = ! optimize; | |
4258 | ||
4259 | /* Caller save not needed yet. */ | |
4260 | caller_save_needed = 0; | |
4261 | ||
4262 | /* No stack slots have been made yet. */ | |
4263 | stack_slot_list = 0; | |
4264 | ||
4265 | /* There is no stack slot for handling nonlocal gotos. */ | |
4266 | nonlocal_goto_handler_slot = 0; | |
4267 | nonlocal_goto_stack_level = 0; | |
4268 | ||
4269 | /* No labels have been declared for nonlocal use. */ | |
4270 | nonlocal_labels = 0; | |
4271 | ||
4272 | /* No function calls so far in this function. */ | |
4273 | function_call_count = 0; | |
4274 | ||
4275 | /* No parm regs have been allocated. | |
4276 | (This is important for output_inline_function.) */ | |
4277 | max_parm_reg = LAST_VIRTUAL_REGISTER + 1; | |
4278 | ||
4279 | /* Initialize the RTL mechanism. */ | |
4280 | init_emit (); | |
4281 | ||
4282 | /* Initialize the queue of pending postincrement and postdecrements, | |
4283 | and some other info in expr.c. */ | |
4284 | init_expr (); | |
4285 | ||
4286 | /* We haven't done register allocation yet. */ | |
4287 | reg_renumber = 0; | |
4288 | ||
4289 | init_const_rtx_hash_table (); | |
4290 | ||
4291 | current_function_name = (*decl_printable_name) (subr, &junk); | |
4292 | ||
4293 | /* Nonzero if this is a nested function that uses a static chain. */ | |
4294 | ||
4295 | current_function_needs_context | |
4296 | = (decl_function_context (current_function_decl) != 0); | |
4297 | ||
4298 | /* Set if a call to setjmp is seen. */ | |
4299 | current_function_calls_setjmp = 0; | |
4300 | ||
4301 | /* Set if a call to longjmp is seen. */ | |
4302 | current_function_calls_longjmp = 0; | |
4303 | ||
4304 | current_function_calls_alloca = 0; | |
4305 | current_function_has_nonlocal_label = 0; | |
4306 | current_function_contains_functions = 0; | |
4307 | ||
4308 | current_function_returns_pcc_struct = 0; | |
4309 | current_function_returns_struct = 0; | |
4310 | current_function_epilogue_delay_list = 0; | |
4311 | current_function_uses_const_pool = 0; | |
4312 | current_function_uses_pic_offset_table = 0; | |
4313 | ||
4314 | /* We have not yet needed to make a label to jump to for tail-recursion. */ | |
4315 | tail_recursion_label = 0; | |
4316 | ||
4317 | /* We haven't had a need to make a save area for ap yet. */ | |
4318 | ||
4319 | arg_pointer_save_area = 0; | |
4320 | ||
4321 | /* No stack slots allocated yet. */ | |
4322 | frame_offset = 0; | |
4323 | ||
4324 | /* No SAVE_EXPRs in this function yet. */ | |
4325 | save_expr_regs = 0; | |
4326 | ||
4327 | /* No RTL_EXPRs in this function yet. */ | |
4328 | rtl_expr_chain = 0; | |
4329 | ||
4330 | /* We have not allocated any temporaries yet. */ | |
4331 | temp_slots = 0; | |
4332 | temp_slot_level = 0; | |
4333 | ||
4334 | /* Within the function body, compute a type's size as soon as it is laid out. */ | |
4335 | immediate_size_expand++; | |
4336 | ||
4337 | /* We haven't made any trampolines for this function yet. */ | |
4338 | trampoline_list = 0; | |
4339 | ||
4340 | init_pending_stack_adjust (); | |
4341 | inhibit_defer_pop = 0; | |
4342 | ||
4343 | current_function_outgoing_args_size = 0; | |
4344 | ||
4345 | /* Initialize the insn lengths. */ | |
4346 | init_insn_lengths (); | |
4347 | ||
4348 | /* Prevent ever trying to delete the first instruction of a function. | |
4349 | Also tell final how to output a linenum before the function prologue. */ | |
4350 | emit_line_note (filename, line); | |
4351 | ||
4352 | /* Make sure first insn is a note even if we don't want linenums. | |
4353 | This makes sure the first insn will never be deleted. | |
4354 | Also, final expects a note to appear there. */ | |
4355 | emit_note (NULL_PTR, NOTE_INSN_DELETED); | |
4356 | ||
4357 | /* Set flags used by final.c. */ | |
4358 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4359 | { | |
4360 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4361 | current_function_returns_pcc_struct = 1; | |
4362 | #endif | |
4363 | current_function_returns_struct = 1; | |
4364 | } | |
4365 | ||
4366 | /* Warn if this function's return value is an aggregate type, | |
4367 | regardless of which calling convention we are using for it. */ | |
4368 | if (warn_aggregate_return | |
4369 | && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE | |
4370 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE | |
4371 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE | |
4372 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE)) | |
4373 | warning ("function returns an aggregate"); | |
4374 | ||
4375 | current_function_returns_pointer | |
4376 | = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE); | |
4377 | ||
4378 | /* Indicate that we need to distinguish between the return value of the | |
4379 | present function and the return value of a function being called. */ | |
4380 | rtx_equal_function_value_matters = 1; | |
4381 | ||
4382 | /* Indicate that we have not instantiated virtual registers yet. */ | |
4383 | virtuals_instantiated = 0; | |
4384 | ||
4385 | /* Indicate we have no need of a frame pointer yet. */ | |
4386 | frame_pointer_needed = 0; | |
4387 | ||
4388 | /* By default assume not varargs. */ | |
4389 | current_function_varargs = 0; | |
4390 | } | |
4391 | ||
4392 | /* Indicate that the current function uses extra args | |
4393 | not explicitly mentioned in the argument list in any fashion. */ | |
4394 | ||
4395 | void | |
4396 | mark_varargs () | |
4397 | { | |
4398 | current_function_varargs = 1; | |
4399 | } | |
4400 | ||
4401 | /* Expand a call to __main at the beginning of a possible main function. */ | |
4402 | ||
4403 | void | |
4404 | expand_main_function () | |
4405 | { | |
4406 | #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main) | |
4407 | emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0, | |
4408 | VOIDmode, 0); | |
4409 | #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */ | |
4410 | } | |
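/* Rough picture of the effect, stated as an assumption about typical
   targets rather than taken from this file: when a target has no .init
   section, compiling main is as if the source had been

	int main () { __main (); ... body of main ... }

   where __main is the libgcc routine that runs static constructors and
   similar per-program initialization.  */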
4411 | \f | |
4412 | /* Start the RTL for a new function, and set variables used for | |
4413 | emitting RTL. | |
4414 | SUBR is the FUNCTION_DECL node. | |
4415 | PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with | |
4416 | the function's parameters, which must be run at any return statement. */ | |
4417 | ||
4418 | void | |
4419 | expand_function_start (subr, parms_have_cleanups) | |
4420 | tree subr; | |
4421 | int parms_have_cleanups; | |
4422 | { | |
4423 | register int i; | |
4424 | tree tem; | |
4425 | rtx last_ptr; | |
4426 | ||
4427 | /* Make sure volatile mem refs aren't considered | |
4428 | valid operands of arithmetic insns. */ | |
4429 | init_recog_no_volatile (); | |
4430 | ||
4431 | /* If function gets a static chain arg, store it in the stack frame. | |
4432 | Do this first, so it gets the first stack slot offset. */ | |
4433 | if (current_function_needs_context) | |
4434 | { | |
4435 | last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
4436 | emit_move_insn (last_ptr, static_chain_incoming_rtx); | |
4437 | } | |
4438 | ||
4439 | /* If the parameters of this function need cleaning up, get a label | |
4440 | for the beginning of the code which executes those cleanups. This must | |
4441 | be done before doing anything with return_label. */ | |
4442 | if (parms_have_cleanups) | |
4443 | cleanup_label = gen_label_rtx (); | |
4444 | else | |
4445 | cleanup_label = 0; | |
4446 | ||
4447 | /* Make the label for return statements to jump to, if this machine | |
4448 | does not have a one-instruction return and uses an epilogue, | |
4449 | or if it returns a structure, or if it has parm cleanups. */ | |
4450 | #ifdef HAVE_return | |
4451 | if (cleanup_label == 0 && HAVE_return | |
4452 | && ! current_function_returns_pcc_struct | |
4453 | && ! (current_function_returns_struct && ! optimize)) | |
4454 | return_label = 0; | |
4455 | else | |
4456 | return_label = gen_label_rtx (); | |
4457 | #else | |
4458 | return_label = gen_label_rtx (); | |
4459 | #endif | |
4460 | ||
4461 | /* Initialize rtx used to return the value. */ | |
4462 | /* Do this before assign_parms so that we copy the struct value address | |
4463 | before any library calls that assign parms might generate. */ | |
4464 | ||
4465 | /* Decide whether to return the value in memory or in a register. */ | |
4466 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4467 | { | |
4468 | /* Returning something that won't go in a register. */ | |
4469 | register rtx value_address = 0; | |
4470 | ||
4471 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4472 | if (current_function_returns_pcc_struct) | |
4473 | { | |
4474 | int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr))); | |
4475 | value_address = assemble_static_space (size); | |
4476 | } | |
4477 | else | |
4478 | #endif | |
4479 | { | |
4480 | /* Expect to be passed the address of a place to store the value. | |
4481 | If it is passed as an argument, assign_parms will take care of | |
4482 | it. */ | |
4483 | if (struct_value_incoming_rtx) | |
4484 | { | |
4485 | value_address = gen_reg_rtx (Pmode); | |
4486 | emit_move_insn (value_address, struct_value_incoming_rtx); | |
4487 | } | |
4488 | } | |
4489 | if (value_address) | |
4490 | DECL_RTL (DECL_RESULT (subr)) | |
4491 | = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), | |
4492 | value_address); | |
4493 | } | |
4494 | else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode) | |
4495 | /* If return mode is void, this decl rtl should not be used. */ | |
4496 | DECL_RTL (DECL_RESULT (subr)) = 0; | |
4497 | else if (parms_have_cleanups) | |
4498 | { | |
4499 | /* If function will end with cleanup code for parms, | |
4500 | compute the return values into a pseudo reg, | |
4501 | which we will copy into the true return register | |
4502 | after the cleanups are done. */ | |
4503 | ||
4504 | enum machine_mode mode = DECL_MODE (DECL_RESULT (subr)); | |
4505 | #ifdef PROMOTE_FUNCTION_RETURN | |
4506 | tree type = TREE_TYPE (DECL_RESULT (subr)); | |
4507 | int unsignedp = TREE_UNSIGNED (type); | |
4508 | ||
4509 | if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE | |
4510 | || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE | |
4511 | || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE | |
4512 | || TREE_CODE (type) == OFFSET_TYPE) | |
4513 | { | |
4514 | PROMOTE_MODE (mode, unsignedp, type); | |
4515 | } | |
4516 | #endif | |
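/* Example of what the promotion above can do (an illustration only, not a
   statement about any particular target): with a PROMOTE_MODE that widens
   sub-word integers, a function returning `short' would have MODE changed
   from HImode to SImode here, so the pseudo made below already has the
   width the hard return register uses.  */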
4517 | ||
4518 | DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode); | |
4519 | } | |
4520 | else | |
4521 | /* Scalar, returned in a register. */ | |
4522 | { | |
4523 | #ifdef FUNCTION_OUTGOING_VALUE | |
4524 | DECL_RTL (DECL_RESULT (subr)) | |
4525 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4526 | #else | |
4527 | DECL_RTL (DECL_RESULT (subr)) | |
4528 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4529 | #endif | |
4530 | ||
4531 | /* Mark this reg as the function's return value. */ | |
4532 | if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG) | |
4533 | { | |
4534 | REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1; | |
4535 | /* Needed because we may need to move this to memory | |
4536 | in case it's a named return value whose address is taken. */ | |
4537 | DECL_REGISTER (DECL_RESULT (subr)) = 1; | |
4538 | } | |
4539 | } | |
4540 | ||
4541 | /* Initialize rtx for parameters and local variables. | |
4542 | In some cases this requires emitting insns. */ | |
4543 | ||
4544 | assign_parms (subr, 0); | |
4545 | ||
4546 | /* The following was moved from init_function_start. | |
4547 | The move is supposed to make sdb output more accurate. */ | |
4548 | /* Indicate the beginning of the function body, | |
4549 | as opposed to parm setup. */ | |
4550 | emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG); | |
4551 | ||
4552 | /* If doing stupid allocation, mark parms as born here. */ | |
4553 | ||
4554 | if (GET_CODE (get_last_insn ()) != NOTE) | |
4555 | emit_note (NULL_PTR, NOTE_INSN_DELETED); | |
4556 | parm_birth_insn = get_last_insn (); | |
4557 | ||
4558 | if (obey_regdecls) | |
4559 | { | |
4560 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++) | |
4561 | use_variable (regno_reg_rtx[i]); | |
4562 | ||
4563 | if (current_function_internal_arg_pointer != virtual_incoming_args_rtx) | |
4564 | use_variable (current_function_internal_arg_pointer); | |
4565 | } | |
4566 | ||
4567 | /* Fetch static chain values for containing functions. */ | |
4568 | tem = decl_function_context (current_function_decl); | |
4569 | /* If not doing stupid register allocation, then start off with the static | |
4570 | chain pointer in a pseudo register. Otherwise, we use the stack | |
4571 | address that was generated above. */ | |
4572 | if (tem && ! obey_regdecls) | |
4573 | last_ptr = copy_to_reg (static_chain_incoming_rtx); | |
4574 | context_display = 0; | |
4575 | while (tem) | |
4576 | { | |
4577 | tree rtlexp = make_node (RTL_EXPR); | |
4578 | ||
4579 | RTL_EXPR_RTL (rtlexp) = last_ptr; | |
4580 | context_display = tree_cons (tem, rtlexp, context_display); | |
4581 | tem = decl_function_context (tem); | |
4582 | if (tem == 0) | |
4583 | break; | |
4584 | /* Chain thru stack frames, assuming pointer to next lexical frame | |
4585 | is found at the place we always store it. */ | |
4586 | #ifdef FRAME_GROWS_DOWNWARD | |
4587 | last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode)); | |
4588 | #endif | |
4589 | last_ptr = copy_to_reg (gen_rtx (MEM, Pmode, | |
4590 | memory_address (Pmode, last_ptr))); | |
4591 | } | |
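/* Rough picture of what the loop above builds (a descriptive sketch, not
   new behavior): for a function nested two levels deep, context_display
   ends up pairing the immediate parent with LAST_PTR as first set, and the
   grandparent with a value loaded back out of the parent's frame:

	parent      -> last_ptr
	grandparent -> *(last_ptr, adjusted when FRAME_GROWS_DOWNWARD)

   This relies on every frame keeping its own static chain in the fixed slot
   set up at the top of expand_function_start.  */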
4592 | ||
4593 | /* After the display initializations is where the tail-recursion label | |
4594 | should go, if we end up needing one. Ensure we have a NOTE here | |
4595 | since some things (like trampolines) get placed before this. */ | |
4596 | tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED); | |
4597 | ||
4598 | /* Evaluate now the sizes of any types declared among the arguments. */ | |
4599 | for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem)) | |
4600 | expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0); | |
4601 | ||
4602 | /* Make sure there is a line number after the function entry setup code. */ | |
4603 | force_next_line_note (); | |
4604 | } | |
4605 | \f | |
4606 | /* Generate RTL for the end of the current function. | |
4607 | FILENAME and LINE are the current position in the source file. */ | |
4608 | ||
4609 | /* It is up to language-specific callers to do cleanups for parameters. */ | |
4610 | ||
4611 | void | |
4612 | expand_function_end (filename, line) | |
4613 | char *filename; | |
4614 | int line; | |
4615 | { | |
4616 | register int i; | |
4617 | tree link; | |
4618 | ||
4619 | static rtx initial_trampoline; | |
4620 | ||
4621 | #ifdef NON_SAVING_SETJMP | |
4622 | /* Don't put any variables in registers if we call setjmp | |
4623 | on a machine that fails to restore the registers. */ | |
4624 | if (NON_SAVING_SETJMP && current_function_calls_setjmp) | |
4625 | { | |
4626 | setjmp_protect (DECL_INITIAL (current_function_decl)); | |
4627 | setjmp_protect_args (); | |
4628 | } | |
4629 | #endif | |
4630 | ||
4631 | /* Save the argument pointer if a save area was made for it. */ | |
4632 | if (arg_pointer_save_area) | |
4633 | { | |
4634 | rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx); | |
4635 | emit_insn_before (x, tail_recursion_reentry); | |
4636 | } | |
4637 | ||
4638 | /* Initialize any trampolines required by this function. */ | |
4639 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
4640 | { | |
4641 | tree function = TREE_PURPOSE (link); | |
4642 | rtx context = lookup_static_chain (function); | |
4643 | rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link)); | |
4644 | rtx seq; | |
4645 | ||
4646 | /* First make sure this compilation has a template for | |
4647 | initializing trampolines. */ | |
4648 | if (initial_trampoline == 0) | |
4649 | { | |
4650 | end_temporary_allocation (); | |
4651 | initial_trampoline | |
4652 | = gen_rtx (MEM, BLKmode, assemble_trampoline_template ()); | |
4653 | resume_temporary_allocation (); | |
4654 | } | |
4655 | ||
4656 | /* Generate insns to initialize the trampoline. */ | |
4657 | start_sequence (); | |
4658 | tramp = change_address (initial_trampoline, BLKmode, | |
4659 | round_trampoline_addr (XEXP (tramp, 0))); | |
4660 | emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE), | |
4661 | FUNCTION_BOUNDARY / BITS_PER_UNIT); | |
4662 | INITIALIZE_TRAMPOLINE (XEXP (tramp, 0), | |
4663 | XEXP (DECL_RTL (function), 0), context); | |
4664 | seq = get_insns (); | |
4665 | end_sequence (); | |
4666 | ||
4667 | /* Put those insns at entry to the containing function (this one). */ | |
4668 | emit_insns_before (seq, tail_recursion_reentry); | |
4669 | } | |
4670 | ||
4671 | #if 0 /* I think unused parms are legitimate enough. */ | |
4672 | /* Warn about unused parms. */ | |
4673 | if (warn_unused) | |
4674 | { | |
4675 | tree decl; | |
4676 | ||
4677 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
4678 | decl; decl = TREE_CHAIN (decl)) | |
4679 | if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL) | |
4680 | warning_with_decl (decl, "unused parameter `%s'"); | |
4681 | } | |
4682 | #endif | |
4683 | ||
4684 | /* Delete handlers for nonlocal gotos if nothing uses them. */ | |
4685 | if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label) | |
4686 | delete_handlers (); | |
4687 | ||
4688 | /* End any sequences that failed to be closed due to syntax errors. */ | |
4689 | while (in_sequence_p ()) | |
4690 | end_sequence (); | |
4691 | ||
4692 | /* Outside function body, can't compute type's actual size | |
4693 | until next function's body starts. */ | |
4694 | immediate_size_expand--; | |
4695 | ||
4696 | /* If doing stupid register allocation, | |
4697 | mark register parms as dying here. */ | |
4698 | ||
4699 | if (obey_regdecls) | |
4700 | { | |
4701 | rtx tem; | |
4702 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++) | |
4703 | use_variable (regno_reg_rtx[i]); | |
4704 | ||
4705 | /* Likewise for the regs of all the SAVE_EXPRs in the function. */ | |
4706 | ||
4707 | for (tem = save_expr_regs; tem; tem = XEXP (tem, 1)) | |
4708 | { | |
4709 | use_variable (XEXP (tem, 0)); | |
4710 | use_variable_after (XEXP (tem, 0), parm_birth_insn); | |
4711 | } | |
4712 | ||
4713 | if (current_function_internal_arg_pointer != virtual_incoming_args_rtx) | |
4714 | use_variable (current_function_internal_arg_pointer); | |
4715 | } | |
4716 | ||
4717 | clear_pending_stack_adjust (); | |
4718 | do_pending_stack_adjust (); | |
4719 | ||
4720 | /* Mark the end of the function body. | |
4721 | If control reaches this insn, the function can drop through | |
4722 | without returning a value. */ | |
4723 | emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END); | |
4724 | ||
4725 | /* Output a linenumber for the end of the function. | |
4726 | SDB depends on this. */ | |
4727 | emit_line_note_force (filename, line); | |
4728 | ||
4729 | /* Output the label for the actual return from the function, | |
4730 | if one is expected. This happens either because a function epilogue | |
4731 | is used instead of a return instruction, or because a return was done | |
4732 | with a goto in order to run local cleanups, or because of pcc-style | |
4733 | structure returning. */ | |
4734 | ||
4735 | if (return_label) | |
4736 | emit_label (return_label); | |
4737 | ||
4738 | /* If we had calls to alloca, and this machine needs | |
4739 | an accurate stack pointer to exit the function, | |
4740 | insert some code to save and restore the stack pointer. */ | |
4741 | #ifdef EXIT_IGNORE_STACK | |
4742 | if (! EXIT_IGNORE_STACK) | |
4743 | #endif | |
4744 | if (current_function_calls_alloca) | |
4745 | { | |
4746 | rtx tem = 0; | |
4747 | ||
4748 | emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn); | |
4749 | emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX); | |
4750 | } | |
4751 | ||
4752 | /* If scalar return value was computed in a pseudo-reg, | |
4753 | copy that to the hard return register. */ | |
4754 | if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0 | |
4755 | && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG | |
4756 | && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl))) | |
4757 | >= FIRST_PSEUDO_REGISTER)) | |
4758 | { | |
4759 | rtx real_decl_result; | |
4760 | ||
4761 | #ifdef FUNCTION_OUTGOING_VALUE | |
4762 | real_decl_result | |
4763 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
4764 | current_function_decl); | |
4765 | #else | |
4766 | real_decl_result | |
4767 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
4768 | current_function_decl); | |
4769 | #endif | |
4770 | REG_FUNCTION_VALUE_P (real_decl_result) = 1; | |
4771 | emit_move_insn (real_decl_result, | |
4772 | DECL_RTL (DECL_RESULT (current_function_decl))); | |
4773 | emit_insn (gen_rtx (USE, VOIDmode, real_decl_result)); | |
4774 | } | |
4775 | ||
4776 | /* If returning a structure, arrange to return the address of the value | |
4777 | in a place where debuggers expect to find it. | |
4778 | ||
4779 | If returning a structure PCC style, | |
4780 | the caller also depends on this value. | |
4781 | And current_function_returns_pcc_struct is not necessarily set. */ | |
4782 | if (current_function_returns_struct | |
4783 | || current_function_returns_pcc_struct) | |
4784 | { | |
4785 | rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
4786 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); | |
4787 | #ifdef FUNCTION_OUTGOING_VALUE | |
4788 | rtx outgoing | |
4789 | = FUNCTION_OUTGOING_VALUE (build_pointer_type (type), | |
4790 | current_function_decl); | |
4791 | #else | |
4792 | rtx outgoing | |
4793 | = FUNCTION_VALUE (build_pointer_type (type), | |
4794 | current_function_decl); | |
4795 | #endif | |
4796 | ||
4797 | /* Mark this as a function return value so integrate will delete the | |
4798 | assignment and USE below when inlining this function. */ | |
4799 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
4800 | ||
4801 | emit_move_insn (outgoing, value_address); | |
4802 | use_variable (outgoing); | |
4803 | } | |
4804 | ||
4805 | /* Output a return insn if we are using one. | |
4806 | Otherwise, let the rtl chain end here, to drop through | |
4807 | into the epilogue. */ | |
4808 | ||
4809 | #ifdef HAVE_return | |
4810 | if (HAVE_return) | |
4811 | { | |
4812 | emit_jump_insn (gen_return ()); | |
4813 | emit_barrier (); | |
4814 | } | |
4815 | #endif | |
4816 | ||
4817 | /* Fix up any gotos that jumped out to the outermost | |
4818 | binding level of the function. | |
4819 | Must follow emitting RETURN_LABEL. */ | |
4820 | ||
4821 | /* If you have any cleanups to do at this point, | |
4822 | and they need to create temporary variables, | |
4823 | then you will lose. */ | |
4824 | fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0); | |
4825 | } | |
4826 | \f | |
4827 | /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */ | |
4828 | ||
4829 | static int *prologue; | |
4830 | static int *epilogue; | |
4831 | ||
4832 | /* Create an array that records the INSN_UIDs of INSNS (either a sequence | |
4833 | or a single insn). */ | |
4834 | ||
4835 | static int * | |
4836 | record_insns (insns) | |
4837 | rtx insns; | |
4838 | { | |
4839 | int *vec; | |
4840 | ||
4841 | if (GET_CODE (insns) == SEQUENCE) | |
4842 | { | |
4843 | int len = XVECLEN (insns, 0); | |
4844 | vec = (int *) oballoc ((len + 1) * sizeof (int)); | |
4845 | vec[len] = 0; | |
4846 | while (--len >= 0) | |
4847 | vec[len] = INSN_UID (XVECEXP (insns, 0, len)); | |
4848 | } | |
4849 | else | |
4850 | { | |
4851 | vec = (int *) oballoc (2 * sizeof (int)); | |
4852 | vec[0] = INSN_UID (insns); | |
4853 | vec[1] = 0; | |
4854 | } | |
4855 | return vec; | |
4856 | } | |
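/* Shape of the vector returned above, as a descriptive note: it is simply a
   zero-terminated array of insn UIDs,

	{ INSN_UID (insn_1), INSN_UID (insn_2), ..., 0 }

   and the trailing zero is what `contains', below, uses to find the end.  */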
4857 | ||
4858 | /* Determine how many INSN_UIDs in VEC are part of INSN. */ | |
4859 | ||
4860 | static int | |
4861 | contains (insn, vec) | |
4862 | rtx insn; | |
4863 | int *vec; | |
4864 | { | |
4865 | register int i, j; | |
4866 | ||
4867 | if (GET_CODE (insn) == INSN | |
4868 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4869 | { | |
4870 | int count = 0; | |
4871 | for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) | |
4872 | for (j = 0; vec[j]; j++) | |
4873 | if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j]) | |
4874 | count++; | |
4875 | return count; | |
4876 | } | |
4877 | else | |
4878 | { | |
4879 | for (j = 0; vec[j]; j++) | |
4880 | if (INSN_UID (insn) == vec[j]) | |
4881 | return 1; | |
4882 | } | |
4883 | return 0; | |
4884 | } | |
4885 | ||
4886 | /* Generate the prologue and epilogue RTL if the machine supports it. Thread | |
4887 | this into place with notes indicating where the prologue ends and where | |
4888 | the epilogue begins. Update the basic block information when possible. */ | |
4889 | ||
4890 | void | |
4891 | thread_prologue_and_epilogue_insns (f) | |
4892 | rtx f; | |
4893 | { | |
4894 | #ifdef HAVE_prologue | |
4895 | if (HAVE_prologue) | |
4896 | { | |
4897 | rtx head, seq; | |
4898 | ||
4899 | /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more | |
4900 | prologue insns and a NOTE_INSN_PROLOGUE_END. */ | |
4901 | emit_note_after (NOTE_INSN_PROLOGUE_END, f); | |
4902 | seq = gen_prologue (); | |
4903 | head = emit_insn_after (seq, f); | |
4904 | ||
4905 | /* Include the new prologue insns in the first block. Ignore them | |
4906 | if they form a basic block unto themselves. */ | |
4907 | if (basic_block_head && n_basic_blocks | |
4908 | && GET_CODE (basic_block_head[0]) != CODE_LABEL) | |
4909 | basic_block_head[0] = NEXT_INSN (f); | |
4910 | ||
4911 | /* Retain a map of the prologue insns. */ | |
4912 | prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head); | |
4913 | } | |
4914 | else | |
4915 | #endif | |
4916 | prologue = 0; | |
4917 | ||
4918 | #ifdef HAVE_epilogue | |
4919 | if (HAVE_epilogue) | |
4920 | { | |
4921 | rtx insn = get_last_insn (); | |
4922 | rtx prev = prev_nonnote_insn (insn); | |
4923 | ||
4924 | /* If we end with a BARRIER, we don't need an epilogue. */ | |
4925 | if (! (prev && GET_CODE (prev) == BARRIER)) | |
4926 | { | |
4927 | rtx tail, seq, tem; | |
4928 | rtx first_use = 0; | |
4929 | rtx last_use = 0; | |
4930 | ||
4931 | /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the | |
4932 | epilogue insns, the USE insns at the end of a function, | |
4933 | the jump insn that returns, and then a BARRIER. */ | |
4934 | ||
4935 | /* Move the USE insns at the end of a function onto a list. */ | |
4936 | while (prev | |
4937 | && GET_CODE (prev) == INSN | |
4938 | && GET_CODE (PATTERN (prev)) == USE) | |
4939 | { | |
4940 | tem = prev; | |
4941 | prev = prev_nonnote_insn (prev); | |
4942 | ||
4943 | NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem); | |
4944 | PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem); | |
4945 | /* Put TEM on the front of the list, keeping both chain links consistent. */ | |
4946 | if (first_use) | |
4947 | { NEXT_INSN (tem) = first_use; PREV_INSN (first_use) = tem; } | |
4948 | first_use = tem; | |
4949 | if (! last_use) | |
4950 | last_use = tem; | |
4951 | } | |
4952 | ||
4953 | emit_barrier_after (insn); | |
4954 | ||
4955 | seq = gen_epilogue (); | |
4956 | tail = emit_jump_insn_after (seq, insn); | |
4957 | ||
4958 | /* Insert the USE insns immediately before the return insn, which | |
4959 | must be the last insn (ignoring notes) before the final barrier. */ | |
4960 | if (first_use) | |
4961 | { | |
4962 | tem = prev_nonnote_insn (get_last_insn ()); | |
4963 | NEXT_INSN (PREV_INSN (tem)) = first_use; | |
4964 | PREV_INSN (first_use) = PREV_INSN (tem); | |
4965 | PREV_INSN (tem) = last_use; | |
4966 | NEXT_INSN (last_use) = tem; | |
4967 | } | |
4968 | ||
4969 | emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn); | |
4970 | ||
4971 | /* Include the new epilogue insns in the last block. Ignore | |
4972 | them if they form a basic block unto themselves. */ | |
4973 | if (basic_block_end && n_basic_blocks | |
4974 | && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN) | |
4975 | basic_block_end[n_basic_blocks - 1] = tail; | |
4976 | ||
4977 | /* Retain a map of the epilogue insns. */ | |
4978 | epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail); | |
4979 | return; | |
4980 | } | |
4981 | } | |
4982 | #endif | |
4983 | epilogue = 0; | |
4984 | } | |
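/* Summary of the insn stream this function aims to produce, pieced together
   from the comments above (a summary only, not additional behavior):

	NOTE_INSN_DELETED		first insn of the function
	  ... prologue insns ...
	NOTE_INSN_PROLOGUE_END
	  ... the function body ...
	NOTE_INSN_EPILOGUE_BEG
	  ... epilogue insns ...
	  ... USE insns moved from the end of the function ...
	return jump insn
	BARRIER
   */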
4985 | ||
4986 | /* Reposition the prologue-end and epilogue-begin notes after instruction | |
4987 | scheduling and delayed branch scheduling. */ | |
4988 | ||
4989 | void | |
4990 | reposition_prologue_and_epilogue_notes (f) | |
4991 | rtx f; | |
4992 | { | |
4993 | #if defined (HAVE_prologue) || defined (HAVE_epilogue) | |
4994 | /* Reposition the prologue and epilogue notes. */ | |
4995 | if (n_basic_blocks) | |
4996 | { | |
4997 | rtx next, prev; | |
4998 | int len; | |
4999 | ||
5000 | if (prologue) | |
5001 | { | |
5002 | register rtx insn, note = 0; | |
5003 | ||
5004 | /* Scan from the beginning until we reach the last prologue insn. | |
5005 | We apparently can't depend on basic_block_{head,end} after | |
5006 | reorg has run. */ | |
5007 | for (len = 0; prologue[len]; len++) | |
5008 | ; | |
5009 | for (insn = f; len && insn; insn = NEXT_INSN (insn)) | |
5010 | { | |
5011 | if (GET_CODE (insn) == NOTE) | |
5012 | { | |
5013 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END) | |
5014 | note = insn; | |
5015 | } | |
5016 | else if ((len -= contains (insn, prologue)) == 0) | |
5017 | { | |
5018 | /* Find the prologue-end note if we haven't already, and | |
5019 | move it to just after the last prologue insn. */ | |
5020 | if (note == 0) | |
5021 | { | |
5022 | for (note = insn; note = NEXT_INSN (note);) | |
5023 | if (GET_CODE (note) == NOTE | |
5024 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END) | |
5025 | break; | |
5026 | } | |
5027 | next = NEXT_INSN (note); | |
5028 | prev = PREV_INSN (note); | |
5029 | if (prev) | |
5030 | NEXT_INSN (prev) = next; | |
5031 | if (next) | |
5032 | PREV_INSN (next) = prev; | |
5033 | add_insn_after (note, insn); | |
5034 | } | |
5035 | } | |
5036 | } | |
5037 | ||
5038 | if (epilogue) | |
5039 | { | |
5040 | register rtx insn, note = 0; | |
5041 | ||
5042 | /* Scan from the end until we reach the first epilogue insn. | |
5043 | We apparently can't depend on basic_block_{head,end} after | |
5044 | reorg has run. */ | |
5045 | for (len = 0; epilogue[len]; len++) | |
5046 | ; | |
5047 | for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn)) | |
5048 | { | |
5049 | if (GET_CODE (insn) == NOTE) | |
5050 | { | |
5051 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG) | |
5052 | note = insn; | |
5053 | } | |
5054 | else if ((len -= contains (insn, epilogue)) == 0) | |
5055 | { | |
5056 | /* Find the epilogue-begin note if we haven't already, and | |
5057 | move it to just before the first epilogue insn. */ | |
5058 | if (note == 0) | |
5059 | { | |
5060 | for (note = insn; note = PREV_INSN (note);) | |
5061 | if (GET_CODE (note) == NOTE | |
5062 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG) | |
5063 | break; | |
5064 | } | |
5065 | next = NEXT_INSN (note); | |
5066 | prev = PREV_INSN (note); | |
5067 | if (prev) | |
5068 | NEXT_INSN (prev) = next; | |
5069 | if (next) | |
5070 | PREV_INSN (next) = prev; | |
5071 | add_insn_after (note, PREV_INSN (insn)); | |
5072 | } | |
5073 | } | |
5074 | } | |
5075 | } | |
5076 | #endif /* HAVE_prologue or HAVE_epilogue */ | |
5077 | } |