1 | /* Expands front end tree to back end RTL for GNU C-Compiler | |
2 | Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc. | |
3 | ||
4 | This file is part of GNU CC. | |
5 | ||
6 | GNU CC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GNU CC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GNU CC; see the file COPYING. If not, write to | |
18 | the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */ | |
19 | ||
20 | ||
21 | /* This file handles the generation of rtl code from tree structure | |
22 | at the level of the function as a whole. | |
23 | It creates the rtl expressions for parameters and auto variables | |
24 | and has full responsibility for allocating stack slots. | |
25 | ||
26 | `expand_function_start' is called at the beginning of a function, | |
27 | before the function body is parsed, and `expand_function_end' is | |
28 | called after parsing the body. | |
29 | ||
30 | Call `assign_stack_local' to allocate a stack slot for a local variable. | |
31 | This is usually done during the RTL generation for the function body, | |
32 | but it can also be done in the reload pass when a pseudo-register does | |
33 | not get a hard register. | |
34 | ||
35 | Call `put_var_into_stack' when you learn, belatedly, that a variable | |
36 | previously given a pseudo-register must in fact go in the stack. | |
37 | This function changes the DECL_RTL to be a stack slot instead of a reg | |
38 | then scans all the RTL instructions so far generated to correct them. */ | |
39 | ||
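/* For orientation, a hedged sketch (not part of the original sources) of
   how a front end typically drives the entry points described above;
   argument lists are omitted here, see the definitions for details.

       expand_function_start (...);    before the body is expanded
         ... expand the statements of the body ...
         ... assign_stack_local () when a local needs a frame slot ...
         ... put_var_into_stack () if a pseudo later needs a memory home ...
       expand_function_end (...);      after the body is expanded  */
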
40 | #include "config.h" | |
41 | ||
42 | #include <stdio.h> | |
43 | ||
44 | #include "rtl.h" | |
45 | #include "tree.h" | |
46 | #include "flags.h" | |
47 | #include "function.h" | |
48 | #include "insn-flags.h" | |
49 | #include "expr.h" | |
50 | #include "insn-codes.h" | |
51 | #include "regs.h" | |
52 | #include "hard-reg-set.h" | |
53 | #include "insn-config.h" | |
54 | #include "recog.h" | |
55 | #include "output.h" | |
56 | #include "basic-block.h" | |
57 | ||
58 | /* Round a value down to the largest multiple of the required alignment | |
59 | that does not exceed it. Avoid using division in case the value is | |
60 | negative. Assume the alignment is a power of two. */ | |
61 | #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1)) | |
62 | ||
63 | /* Similar, but round up to the smallest multiple of the alignment that | |
64 | is not less than the value. */ | |
65 | #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1)) | |
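
/* A minimal self-check of the two macros above, illustrative only and
   compiled out; it assumes, as stated, that ALIGN is a power of two.
   The point is that the bitwise forms behave like floor and ceiling
   even for negative values, with no division involved.  */
#if 0
static void
check_round_macros ()
{
  if (FLOOR_ROUND (-13, 8) != -16 || CEIL_ROUND (-13, 8) != -8)
    abort ();
  if (FLOOR_ROUND (21, 16) != 16 || CEIL_ROUND (21, 16) != 32)
    abort ();
}
#endif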
66 | ||
67 | /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp | |
68 | during rtl generation. If they are different register numbers, this is | |
69 | always true. It may also be true if | |
70 | FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl | |
71 | generation. See fix_lexical_addr for details. */ | |
72 | ||
73 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM | |
74 | #define NEED_SEPARATE_AP | |
75 | #endif | |
76 | ||
77 | /* Number of bytes of args popped by function being compiled on its return. | |
78 | Zero if no bytes are to be popped. | |
79 | May affect compilation of return insn or of function epilogue. */ | |
80 | ||
81 | int current_function_pops_args; | |
82 | ||
83 | /* Nonzero if function being compiled needs to be given an address | |
84 | where the value should be stored. */ | |
85 | ||
86 | int current_function_returns_struct; | |
87 | ||
88 | /* Nonzero if function being compiled needs to | |
89 | return the address of where it has put a structure value. */ | |
90 | ||
91 | int current_function_returns_pcc_struct; | |
92 | ||
93 | /* Nonzero if function being compiled needs to be passed a static chain. */ | |
94 | ||
95 | int current_function_needs_context; | |
96 | ||
97 | /* Nonzero if function being compiled can call setjmp. */ | |
98 | ||
99 | int current_function_calls_setjmp; | |
100 | ||
101 | /* Nonzero if function being compiled can call longjmp. */ | |
102 | ||
103 | int current_function_calls_longjmp; | |
104 | ||
105 | /* Nonzero if function being compiled receives nonlocal gotos | |
106 | from nested functions. */ | |
107 | ||
108 | int current_function_has_nonlocal_label; | |
109 | ||
110 | /* Nonzero if function being compiled contains nested functions. */ | |
111 | ||
112 | int current_function_contains_functions; | |
113 | ||
114 | /* Nonzero if function being compiled can call alloca, | |
115 | either as a subroutine or builtin. */ | |
116 | ||
117 | int current_function_calls_alloca; | |
118 | ||
119 | /* Nonzero if the current function returns a pointer type. */ | |
120 | ||
121 | int current_function_returns_pointer; | |
122 | ||
123 | /* If some insns can be deferred to the delay slots of the epilogue, the | |
124 | delay list for them is recorded here. */ | |
125 | ||
126 | rtx current_function_epilogue_delay_list; | |
127 | ||
128 | /* If function's args have a fixed size, this is that size, in bytes. | |
129 | Otherwise, it is -1. | |
130 | May affect compilation of return insn or of function epilogue. */ | |
131 | ||
132 | int current_function_args_size; | |
133 | ||
134 | /* # bytes the prologue should push and pretend that the caller pushed them. | |
135 | The prologue must do this, but only if parms can be passed in registers. */ | |
136 | ||
137 | int current_function_pretend_args_size; | |
138 | ||
139 | /* # of bytes of outgoing arguments required to be pushed by the prologue. | |
140 | If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined | |
141 | and no stack adjusts will be done on function calls. */ | |
142 | ||
143 | int current_function_outgoing_args_size; | |
144 | ||
145 | /* This is the offset from the arg pointer to the place where the first | |
146 | anonymous arg can be found, if there is one. */ | |
147 | ||
148 | rtx current_function_arg_offset_rtx; | |
149 | ||
150 | /* Nonzero if current function uses varargs.h or equivalent. | |
151 | Zero for functions that use stdarg.h. */ | |
152 | ||
153 | int current_function_varargs; | |
154 | ||
155 | /* Quantities of various kinds of registers | |
156 | used for the current function's args. */ | |
157 | ||
158 | CUMULATIVE_ARGS current_function_args_info; | |
159 | ||
160 | /* Name of function now being compiled. */ | |
161 | ||
162 | char *current_function_name; | |
163 | ||
164 | /* If non-zero, an RTL expression for that location at which the current | |
165 | function returns its result. Always equal to | |
166 | DECL_RTL (DECL_RESULT (current_function_decl)), but provided | |
167 | independently of the tree structures. */ | |
168 | ||
169 | rtx current_function_return_rtx; | |
170 | ||
171 | /* Nonzero if the current function uses the constant pool. */ | |
172 | ||
173 | int current_function_uses_const_pool; | |
174 | ||
175 | /* Nonzero if the current function uses pic_offset_table_rtx. */ | |
176 | int current_function_uses_pic_offset_table; | |
177 | ||
178 | /* The arg pointer hard register, or the pseudo into which it was copied. */ | |
179 | rtx current_function_internal_arg_pointer; | |
180 | ||
181 | /* The FUNCTION_DECL for an inline function currently being expanded. */ | |
182 | tree inline_function_decl; | |
183 | ||
184 | /* Number of function calls seen so far in current function. */ | |
185 | ||
186 | int function_call_count; | |
187 | ||
188 | /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels | |
189 | (labels to which there can be nonlocal gotos from nested functions) | |
190 | in this function. */ | |
191 | ||
192 | tree nonlocal_labels; | |
193 | ||
194 | /* RTX for stack slot that holds the current handler for nonlocal gotos. | |
195 | Zero when function does not have nonlocal labels. */ | |
196 | ||
197 | rtx nonlocal_goto_handler_slot; | |
198 | ||
199 | /* RTX for stack slot that holds the stack pointer value to restore | |
200 | for a nonlocal goto. | |
201 | Zero when function does not have nonlocal labels. */ | |
202 | ||
203 | rtx nonlocal_goto_stack_level; | |
204 | ||
205 | /* Label that will go on parm cleanup code, if any. | |
206 | Jumping to this label runs cleanup code for parameters, if | |
207 | such code must be run. Following this code is the logical return label. */ | |
208 | ||
209 | rtx cleanup_label; | |
210 | ||
211 | /* Label that will go on function epilogue. | |
212 | Jumping to this label serves as a "return" instruction | |
213 | on machines which require execution of the epilogue on all returns. */ | |
214 | ||
215 | rtx return_label; | |
216 | ||
217 | /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs. | |
218 | So we can mark them all live at the end of the function, if nonopt. */ | |
219 | rtx save_expr_regs; | |
220 | ||
221 | /* List (chain of EXPR_LISTs) of all stack slots in this function. | |
222 | Made for the sake of unshare_all_rtl. */ | |
223 | rtx stack_slot_list; | |
224 | ||
225 | /* Chain of all RTL_EXPRs that have insns in them. */ | |
226 | tree rtl_expr_chain; | |
227 | ||
228 | /* Label to jump back to for tail recursion, or 0 if we have | |
229 | not yet needed one for this function. */ | |
230 | rtx tail_recursion_label; | |
231 | ||
232 | /* Place after which to insert the tail_recursion_label if we need one. */ | |
233 | rtx tail_recursion_reentry; | |
234 | ||
235 | /* Location at which to save the argument pointer if it will need to be | |
236 | referenced. There are two cases where this is done: if nonlocal gotos | |
237 | exist, or if vars stored at an offset from the argument pointer will be | |
238 | needed by inner routines. */ | |
239 | ||
240 | rtx arg_pointer_save_area; | |
241 | ||
242 | /* Offset to end of allocated area of stack frame. | |
243 | If stack grows down, this is the address of the last stack slot allocated. | |
244 | If stack grows up, this is the address for the next slot. */ | |
245 | int frame_offset; | |
246 | ||
247 | /* List (chain of TREE_LISTs) of static chains for containing functions. | |
248 | Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx | |
249 | in an RTL_EXPR in the TREE_VALUE. */ | |
250 | static tree context_display; | |
251 | ||
252 | /* List (chain of TREE_LISTs) of trampolines for nested functions. | |
253 | The trampoline sets up the static chain and jumps to the function. | |
254 | We supply the trampoline's address when the function's address is requested. | |
255 | ||
256 | Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx | |
257 | in an RTL_EXPR in the TREE_VALUE. */ | |
258 | static tree trampoline_list; | |
259 | ||
260 | /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */ | |
261 | static rtx parm_birth_insn; | |
262 | ||
263 | #if 0 | |
264 | /* Nonzero if a stack slot has been generated whose address is not | |
265 | actually valid. It means that the generated rtl must all be scanned | |
266 | to detect and correct the invalid addresses where they occur. */ | |
267 | static int invalid_stack_slot; | |
268 | #endif | |
269 | ||
270 | /* Last insn of those whose job was to put parms into their nominal homes. */ | |
271 | static rtx last_parm_insn; | |
272 | ||
273 | /* 1 + last pseudo register number used for loading a copy | |
274 | of a parameter of this function. */ | |
275 | static int max_parm_reg; | |
276 | ||
277 | /* Vector indexed by REGNO, containing location on stack in which | |
278 | to put the parm which is nominally in pseudo register REGNO, | |
279 | if we discover that that parm must go in the stack. */ | |
280 | static rtx *parm_reg_stack_loc; | |
281 | ||
282 | #if 0 /* Turned off because 0 seems to work just as well. */ | |
283 | /* Cleanup lists are required for binding levels regardless of whether | |
284 | that binding level has cleanups or not. This node serves as the | |
285 | cleanup list whenever an empty list is required. */ | |
286 | static tree empty_cleanup_list; | |
287 | #endif | |
288 | ||
289 | /* Nonzero once virtual register instantiation has been done. | |
290 | assign_stack_local uses frame_pointer_rtx when this is nonzero. */ | |
291 | static int virtuals_instantiated; | |
292 | ||
293 | /* Nonzero if we need to distinguish between the return value of this function | |
294 | and the return value of a function called by this function. This helps | |
295 | integrate.c */ | |
296 | ||
297 | extern int rtx_equal_function_value_matters; | |
298 | ||
299 | void fixup_gotos (); | |
300 | ||
301 | static tree round_down (); | |
302 | static rtx round_trampoline_addr (); | |
303 | static rtx fixup_stack_1 (); | |
304 | static void fixup_var_refs (); | |
305 | static void fixup_var_refs_insns (); | |
306 | static void fixup_var_refs_1 (); | |
307 | static void optimize_bit_field (); | |
308 | static void instantiate_decls (); | |
309 | static void instantiate_decls_1 (); | |
310 | static void instantiate_decl (); | |
311 | static int instantiate_virtual_regs_1 (); | |
312 | static rtx fixup_memory_subreg (); | |
313 | static rtx walk_fixup_memory_subreg (); | |
314 | \f | |
315 | /* In order to evaluate some expressions, such as function calls returning | |
316 | structures in memory, we need to temporarily allocate stack locations. | |
317 | We record each allocated temporary in the following structure. | |
318 | ||
319 | Associated with each temporary slot is a nesting level. When we pop up | |
320 | one level, all temporaries associated with the previous level are freed. | |
321 | Normally, all temporaries are freed after the execution of the statement | |
322 | in which they were created. However, if we are inside a ({...}) grouping, | |
323 | the result may be in a temporary and hence must be preserved. If the | |
324 | result could be in a temporary, we preserve it if we can determine which | |
325 | one it is in. If we cannot determine which temporary may contain the | |
326 | result, all temporaries are preserved. A temporary is preserved by | |
327 | pretending it was allocated at the previous nesting level. | |
328 | ||
329 | Automatic variables are also assigned temporary slots, at the nesting | |
330 | level where they are defined. They are marked as "kept" so that | |
331 | free_temp_slots will not free them. */ | |
332 | ||
333 | struct temp_slot | |
334 | { | |
335 | /* Points to next temporary slot. */ | |
336 | struct temp_slot *next; | |
337 | /* The rtx used to reference the slot. */ | |
338 | rtx slot; | |
339 | /* The size, in units, of the slot. */ | |
340 | int size; | |
341 | /* Non-zero if this temporary is currently in use. */ | |
342 | char in_use; | |
343 | /* Nesting level at which this slot is being used. */ | |
344 | int level; | |
345 | /* Non-zero if this should survive a call to free_temp_slots. */ | |
346 | int keep; | |
347 | }; | |
348 | ||
349 | /* List of all temporaries allocated, both available and in use. */ | |
350 | ||
351 | struct temp_slot *temp_slots; | |
352 | ||
353 | /* Current nesting level for temporaries. */ | |
354 | ||
355 | int temp_slot_level; | |
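
/* Illustrative sketch (not from the original sources) of how the
   temporary-slot machinery declared above is typically driven while
   expanding one statement; the real calls are scattered through the
   statement and expression expanders:

       push_temp_slots ();
       slot = assign_stack_temp (mode, size, 0);
       ... emit insns that use SLOT ...
       preserve_temp_slots (result);   if RESULT may sit in a temporary
       free_temp_slots ();
       pop_temp_slots ();                                               */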
356 | \f | |
357 | /* Pointer to chain of `struct function' for containing functions. */ | |
358 | struct function *outer_function_chain; | |
359 | ||
360 | /* Given a function decl for a containing function, | |
361 | return the `struct function' for it. */ | |
362 | ||
363 | struct function * | |
364 | find_function_data (decl) | |
365 | tree decl; | |
366 | { | |
367 | struct function *p; | |
368 | for (p = outer_function_chain; p; p = p->next) | |
369 | if (p->decl == decl) | |
370 | return p; | |
371 | abort (); | |
372 | } | |
373 | ||
374 | /* Save the current context for compilation of a nested function. | |
375 | This is called from language-specific code. | |
376 | The caller is responsible for saving any language-specific status, | |
377 | since this function knows only about language-independent variables. */ | |
378 | ||
379 | void | |
380 | push_function_context () | |
381 | { | |
382 | struct function *p = (struct function *) xmalloc (sizeof (struct function)); | |
383 | ||
384 | p->next = outer_function_chain; | |
385 | outer_function_chain = p; | |
386 | ||
387 | p->name = current_function_name; | |
388 | p->decl = current_function_decl; | |
389 | p->pops_args = current_function_pops_args; | |
390 | p->returns_struct = current_function_returns_struct; | |
391 | p->returns_pcc_struct = current_function_returns_pcc_struct; | |
392 | p->needs_context = current_function_needs_context; | |
393 | p->calls_setjmp = current_function_calls_setjmp; | |
394 | p->calls_longjmp = current_function_calls_longjmp; | |
395 | p->calls_alloca = current_function_calls_alloca; | |
396 | p->has_nonlocal_label = current_function_has_nonlocal_label; | |
397 | p->args_size = current_function_args_size; | |
398 | p->pretend_args_size = current_function_pretend_args_size; | |
399 | p->arg_offset_rtx = current_function_arg_offset_rtx; | |
400 | p->uses_const_pool = current_function_uses_const_pool; | |
401 | p->uses_pic_offset_table = current_function_uses_pic_offset_table; | |
402 | p->internal_arg_pointer = current_function_internal_arg_pointer; | |
403 | p->max_parm_reg = max_parm_reg; | |
404 | p->parm_reg_stack_loc = parm_reg_stack_loc; | |
405 | p->outgoing_args_size = current_function_outgoing_args_size; | |
406 | p->return_rtx = current_function_return_rtx; | |
407 | p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot; | |
408 | p->nonlocal_goto_stack_level = nonlocal_goto_stack_level; | |
409 | p->nonlocal_labels = nonlocal_labels; | |
410 | p->cleanup_label = cleanup_label; | |
411 | p->return_label = return_label; | |
412 | p->save_expr_regs = save_expr_regs; | |
413 | p->stack_slot_list = stack_slot_list; | |
414 | p->parm_birth_insn = parm_birth_insn; | |
415 | p->frame_offset = frame_offset; | |
416 | p->tail_recursion_label = tail_recursion_label; | |
417 | p->tail_recursion_reentry = tail_recursion_reentry; | |
418 | p->arg_pointer_save_area = arg_pointer_save_area; | |
419 | p->rtl_expr_chain = rtl_expr_chain; | |
420 | p->last_parm_insn = last_parm_insn; | |
421 | p->context_display = context_display; | |
422 | p->trampoline_list = trampoline_list; | |
423 | p->function_call_count = function_call_count; | |
424 | p->temp_slots = temp_slots; | |
425 | p->temp_slot_level = temp_slot_level; | |
426 | p->fixup_var_refs_queue = 0; | |
427 | p->epilogue_delay_list = current_function_epilogue_delay_list; | |
428 | ||
429 | save_tree_status (p); | |
430 | save_storage_status (p); | |
431 | save_emit_status (p); | |
432 | init_emit (); | |
433 | save_expr_status (p); | |
434 | save_stmt_status (p); | |
435 | save_varasm_status (p); | |
436 | } | |
437 | ||
438 | /* Restore the last saved context, at the end of a nested function. | |
439 | This function is called from language-specific code. */ | |
440 | ||
441 | void | |
442 | pop_function_context () | |
443 | { | |
444 | struct function *p = outer_function_chain; | |
445 | ||
446 | outer_function_chain = p->next; | |
447 | ||
448 | current_function_name = p->name; | |
449 | current_function_decl = p->decl; | |
450 | current_function_pops_args = p->pops_args; | |
451 | current_function_returns_struct = p->returns_struct; | |
452 | current_function_returns_pcc_struct = p->returns_pcc_struct; | |
453 | current_function_needs_context = p->needs_context; | |
454 | current_function_calls_setjmp = p->calls_setjmp; | |
455 | current_function_calls_longjmp = p->calls_longjmp; | |
456 | current_function_calls_alloca = p->calls_alloca; | |
457 | current_function_has_nonlocal_label = p->has_nonlocal_label; | |
458 | current_function_contains_functions = 1; | |
459 | current_function_args_size = p->args_size; | |
460 | current_function_pretend_args_size = p->pretend_args_size; | |
461 | current_function_arg_offset_rtx = p->arg_offset_rtx; | |
462 | current_function_uses_const_pool = p->uses_const_pool; | |
463 | current_function_uses_pic_offset_table = p->uses_pic_offset_table; | |
464 | current_function_internal_arg_pointer = p->internal_arg_pointer; | |
465 | max_parm_reg = p->max_parm_reg; | |
466 | parm_reg_stack_loc = p->parm_reg_stack_loc; | |
467 | current_function_outgoing_args_size = p->outgoing_args_size; | |
468 | current_function_return_rtx = p->return_rtx; | |
469 | nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot; | |
470 | nonlocal_goto_stack_level = p->nonlocal_goto_stack_level; | |
471 | nonlocal_labels = p->nonlocal_labels; | |
472 | cleanup_label = p->cleanup_label; | |
473 | return_label = p->return_label; | |
474 | save_expr_regs = p->save_expr_regs; | |
475 | stack_slot_list = p->stack_slot_list; | |
476 | parm_birth_insn = p->parm_birth_insn; | |
477 | frame_offset = p->frame_offset; | |
478 | tail_recursion_label = p->tail_recursion_label; | |
479 | tail_recursion_reentry = p->tail_recursion_reentry; | |
480 | arg_pointer_save_area = p->arg_pointer_save_area; | |
481 | rtl_expr_chain = p->rtl_expr_chain; | |
482 | last_parm_insn = p->last_parm_insn; | |
483 | context_display = p->context_display; | |
484 | trampoline_list = p->trampoline_list; | |
485 | function_call_count = p->function_call_count; | |
486 | temp_slots = p->temp_slots; | |
487 | temp_slot_level = p->temp_slot_level; | |
488 | current_function_epilogue_delay_list = p->epilogue_delay_list; | |
489 | ||
490 | restore_tree_status (p); | |
491 | restore_storage_status (p); | |
492 | restore_expr_status (p); | |
493 | restore_emit_status (p); | |
494 | restore_stmt_status (p); | |
495 | restore_varasm_status (p); | |
496 | ||
497 | /* Finish doing put_var_into_stack for any of our variables | |
498 | which became addressable during the nested function. */ | |
499 | { | |
500 | struct var_refs_queue *queue = p->fixup_var_refs_queue; | |
501 | for (; queue; queue = queue->next) | |
502 | fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp); | |
503 | } | |
504 | ||
505 | free (p); | |
506 | ||
507 | /* Reset variables that have known state during rtx generation. */ | |
508 | rtx_equal_function_value_matters = 1; | |
509 | virtuals_instantiated = 0; | |
510 | } | |
511 | \f | |
512 | /* Allocate fixed slots in the stack frame of the current function. */ | |
513 | ||
514 | /* Return size needed for stack frame based on slots so far allocated. | |
515 | This size counts from zero. It is not rounded to STACK_BOUNDARY; | |
516 | the caller may have to do that. */ | |
517 | ||
518 | int | |
519 | get_frame_size () | |
520 | { | |
521 | #ifdef FRAME_GROWS_DOWNWARD | |
522 | return -frame_offset; | |
523 | #else | |
524 | return frame_offset; | |
525 | #endif | |
526 | } | |
527 | ||
528 | /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it | |
529 | with machine mode MODE. | |
530 | ||
531 | ALIGN controls the amount of alignment for the address of the slot: | |
532 | 0 means according to MODE, | |
533 | -1 means use BIGGEST_ALIGNMENT and round size to multiple of that, | |
534 | positive specifies alignment boundary in bits. | |
535 | ||
536 | We do not round to stack_boundary here. */ | |
537 | ||
538 | rtx | |
539 | assign_stack_local (mode, size, align) | |
540 | enum machine_mode mode; | |
541 | int size; | |
542 | int align; | |
543 | { | |
544 | register rtx x, addr; | |
545 | int bigend_correction = 0; | |
546 | int alignment; | |
547 | ||
548 | if (align == 0) | |
549 | { | |
550 | alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
551 | if (mode == BLKmode) | |
552 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
553 | } | |
554 | else if (align == -1) | |
555 | { | |
556 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
557 | size = CEIL_ROUND (size, alignment); | |
558 | } | |
559 | else | |
560 | alignment = align / BITS_PER_UNIT; | |
561 | ||
562 | /* Round frame offset to that alignment. | |
563 | We must be careful here, since FRAME_OFFSET might be negative and | |
564 | division with a negative dividend isn't as well defined as we might | |
565 | like. So we instead assume that ALIGNMENT is a power of two and | |
566 | use logical operations which are unambiguous. */ | |
567 | #ifdef FRAME_GROWS_DOWNWARD | |
568 | frame_offset = FLOOR_ROUND (frame_offset, alignment); | |
569 | #else | |
570 | frame_offset = CEIL_ROUND (frame_offset, alignment); | |
571 | #endif | |
572 | ||
573 | /* On a big-endian machine, if we are allocating more space than we will use, | |
574 | use the least significant bytes of those that are allocated. */ | |
575 | #if BYTES_BIG_ENDIAN | |
576 | if (mode != BLKmode) | |
577 | bigend_correction = size - GET_MODE_SIZE (mode); | |
578 | #endif | |
579 | ||
580 | #ifdef FRAME_GROWS_DOWNWARD | |
581 | frame_offset -= size; | |
582 | #endif | |
583 | ||
584 | /* If we have already instantiated virtual registers, return the actual | |
585 | address relative to the frame pointer. */ | |
586 | if (virtuals_instantiated) | |
587 | addr = plus_constant (frame_pointer_rtx, | |
588 | (frame_offset + bigend_correction | |
589 | + STARTING_FRAME_OFFSET)); | |
590 | else | |
591 | addr = plus_constant (virtual_stack_vars_rtx, | |
592 | frame_offset + bigend_correction); | |
593 | ||
594 | #ifndef FRAME_GROWS_DOWNWARD | |
595 | frame_offset += size; | |
596 | #endif | |
597 | ||
598 | x = gen_rtx (MEM, mode, addr); | |
599 | ||
600 | stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list); | |
601 | ||
602 | return x; | |
603 | } | |
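
/* Example calls, illustrative only, showing the three ALIGN conventions
   documented above (mode alignment, BIGGEST_ALIGNMENT with the size
   rounded up, and an explicit boundary in bits):

       assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
       assign_stack_local (BLKmode, size, -1);
       assign_stack_local (BLKmode, size, 64);                          */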
604 | ||
605 | /* Assign a stack slot in a containing function. | |
606 | First three arguments are same as in preceding function. | |
607 | The last argument specifies the function to allocate in. */ | |
608 | ||
609 | rtx | |
610 | assign_outer_stack_local (mode, size, align, function) | |
611 | enum machine_mode mode; | |
612 | int size; | |
613 | int align; | |
614 | struct function *function; | |
615 | { | |
616 | register rtx x, addr; | |
617 | int bigend_correction = 0; | |
618 | int alignment; | |
619 | ||
620 | /* Allocate in the memory associated with the function in whose frame | |
621 | we are assigning. */ | |
622 | push_obstacks (function->function_obstack, | |
623 | function->function_maybepermanent_obstack); | |
624 | ||
625 | if (align == 0) | |
626 | { | |
627 | alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; | |
628 | if (mode == BLKmode) | |
629 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
630 | } | |
631 | else if (align == -1) | |
632 | { | |
633 | alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; | |
634 | size = CEIL_ROUND (size, alignment); | |
635 | } | |
636 | else | |
637 | alignment = align / BITS_PER_UNIT; | |
638 | ||
639 | /* Round frame offset to that alignment. */ | |
640 | #ifdef FRAME_GROWS_DOWNWARD | |
641 | function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment); | |
642 | #else | |
643 | function->frame_offset = CEIL_ROUND (function->frame_offset, alignment); | |
644 | #endif | |
645 | ||
646 | /* On a big-endian machine, if we are allocating more space than we will use, | |
647 | use the least significant bytes of those that are allocated. */ | |
648 | #if BYTES_BIG_ENDIAN | |
649 | if (mode != BLKmode) | |
650 | bigend_correction = size - GET_MODE_SIZE (mode); | |
651 | #endif | |
652 | ||
653 | #ifdef FRAME_GROWS_DOWNWARD | |
654 | function->frame_offset -= size; | |
655 | #endif | |
656 | addr = plus_constant (virtual_stack_vars_rtx, | |
657 | function->frame_offset + bigend_correction); | |
658 | #ifndef FRAME_GROWS_DOWNWARD | |
659 | function->frame_offset += size; | |
660 | #endif | |
661 | ||
662 | x = gen_rtx (MEM, mode, addr); | |
663 | ||
664 | function->stack_slot_list | |
665 | = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list); | |
666 | ||
667 | pop_obstacks (); | |
668 | ||
669 | return x; | |
670 | } | |
671 | \f | |
672 | /* Allocate a temporary stack slot and record it for possible later | |
673 | reuse. | |
674 | ||
675 | MODE is the machine mode to be given to the returned rtx. | |
676 | ||
677 | SIZE is the size in units of the space required. We do no rounding here | |
678 | since assign_stack_local will do any required rounding. | |
679 | ||
680 | KEEP is non-zero if this slot is to be retained after a call to | |
681 | free_temp_slots. Automatic variables for a block are allocated with this | |
682 | flag. */ | |
683 | ||
684 | rtx | |
685 | assign_stack_temp (mode, size, keep) | |
686 | enum machine_mode mode; | |
687 | int size; | |
688 | int keep; | |
689 | { | |
690 | struct temp_slot *p, *best_p = 0; | |
691 | ||
692 | /* First try to find an available, already-allocated temporary that is the | |
693 | exact size we require. */ | |
694 | for (p = temp_slots; p; p = p->next) | |
695 | if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use) | |
696 | break; | |
697 | ||
698 | /* If we didn't find one, try one that is larger than what we want. We | |
699 | find the smallest such. */ | |
700 | if (p == 0) | |
701 | for (p = temp_slots; p; p = p->next) | |
702 | if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use | |
703 | && (best_p == 0 || best_p->size > p->size)) | |
704 | best_p = p; | |
705 | ||
706 | /* Make our best, if any, the one to use. */ | |
707 | if (best_p) | |
708 | p = best_p; | |
709 | ||
710 | /* If we still didn't find one, make a new temporary. */ | |
711 | if (p == 0) | |
712 | { | |
713 | p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); | |
714 | p->size = size; | |
715 | /* If the temp slot mode doesn't indicate the alignment, | |
716 | use the largest possible, so no one will be disappointed. */ | |
717 | p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0); | |
718 | p->next = temp_slots; | |
719 | temp_slots = p; | |
720 | } | |
721 | ||
722 | p->in_use = 1; | |
723 | p->level = temp_slot_level; | |
724 | p->keep = keep; | |
725 | return p->slot; | |
726 | } | |
727 | \f | |
728 | /* If X could be a reference to a temporary slot, mark that slot as belonging | |
729 | to the level one higher. If X matched one of our slots, just mark that | |
730 | one. Otherwise, we can't easily predict which it is, so upgrade all of | |
731 | them. Kept slots need not be touched. | |
732 | ||
733 | This is called when an ({...}) construct occurs and a statement | |
734 | returns a value in memory. */ | |
735 | ||
736 | void | |
737 | preserve_temp_slots (x) | |
738 | rtx x; | |
739 | { | |
740 | struct temp_slot *p; | |
741 | ||
742 | /* If X is not in memory or is at a constant address, it cannot be in | |
743 | a temporary slot. */ | |
744 | if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))) | |
745 | return; | |
746 | ||
747 | /* First see if we can find a match. */ | |
748 | for (p = temp_slots; p; p = p->next) | |
749 | if (p->in_use && x == p->slot) | |
750 | { | |
751 | p->level--; | |
752 | return; | |
753 | } | |
754 | ||
755 | /* Otherwise, preserve all non-kept slots at this level. */ | |
756 | for (p = temp_slots; p; p = p->next) | |
757 | if (p->in_use && p->level == temp_slot_level && ! p->keep) | |
758 | p->level--; | |
759 | } | |
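
/* Illustrative only: the ({...}) construct the comment above refers to
   is the GNU C statement expression, e.g.

       struct big { int v[8]; };
       extern struct big make_big ();
       ... ({ struct big t = make_big (); t; }) ...

   The value of the braced group is returned in memory, i.e. in a stack
   temporary, so the expander preserves that slot (or, if it cannot tell
   which slot holds the value, all non-kept slots at this level) instead
   of letting free_temp_slots release it while the value is still live.  */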
760 | ||
761 | /* Free all temporaries used so far. This is normally called at the end | |
762 | of generating code for a statement. */ | |
763 | ||
764 | void | |
765 | free_temp_slots () | |
766 | { | |
767 | struct temp_slot *p; | |
768 | ||
769 | for (p = temp_slots; p; p = p->next) | |
770 | if (p->in_use && p->level == temp_slot_level && ! p->keep) | |
771 | p->in_use = 0; | |
772 | } | |
773 | ||
774 | /* Push deeper into the nesting level for stack temporaries. */ | |
775 | ||
776 | void | |
777 | push_temp_slots () | |
778 | { | |
779 | /* For GNU C++, we must allow a sequence to be emitted anywhere in | |
780 | the level where the sequence was started. By not changing levels | |
781 | when the compiler is inside a sequence, the temporaries for the | |
782 | sequence will not unwittingly conflict with the temporaries for | |
783 | other sequences and/or code at that level. */ | |
784 | if (in_sequence_p ()) | |
785 | return; | |
786 | ||
787 | temp_slot_level++; | |
788 | } | |
789 | ||
790 | /* Pop a temporary nesting level. All slots in use in the current level | |
791 | are freed. */ | |
792 | ||
793 | void | |
794 | pop_temp_slots () | |
795 | { | |
796 | struct temp_slot *p; | |
797 | ||
798 | /* See comment in push_temp_slots about why we don't change levels | |
799 | in sequences. */ | |
800 | if (in_sequence_p ()) | |
801 | return; | |
802 | ||
803 | for (p = temp_slots; p; p = p->next) | |
804 | if (p->in_use && p->level == temp_slot_level) | |
805 | p->in_use = 0; | |
806 | ||
807 | temp_slot_level--; | |
808 | } | |
809 | \f | |
810 | /* Retroactively move an auto variable from a register to a stack slot. | |
811 | This is done when an address-reference to the variable is seen. */ | |
812 | ||
813 | void | |
814 | put_var_into_stack (decl) | |
815 | tree decl; | |
816 | { | |
817 | register rtx reg; | |
818 | register rtx new = 0; | |
819 | enum machine_mode promoted_mode, decl_mode; | |
820 | struct function *function = 0; | |
821 | tree context = decl_function_context (decl); | |
822 | ||
823 | /* Get the current rtl used for this object and its original mode. */ | |
824 | reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl); | |
825 | ||
826 | /* No need to do anything if decl has no rtx yet | |
827 | since in that case caller is setting TREE_ADDRESSABLE | |
828 | and a stack slot will be assigned when the rtl is made. */ | |
829 | if (reg == 0) | |
830 | return; | |
831 | ||
832 | /* Get the declared mode for this object. */ | |
833 | decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl)) | |
834 | : DECL_MODE (decl)); | |
835 | /* Get the mode it's actually stored in. */ | |
836 | promoted_mode = GET_MODE (reg); | |
837 | ||
838 | /* If this variable comes from an outer function, | |
839 | find that function's saved context. */ | |
840 | if (context != current_function_decl) | |
841 | for (function = outer_function_chain; function; function = function->next) | |
842 | if (function->decl == context) | |
843 | break; | |
844 | ||
845 | /* If this is a variable-size object with a pseudo to address it, | |
846 | put that pseudo into the stack, if the var is nonlocal. */ | |
847 | if (DECL_NONLOCAL (decl) | |
848 | && GET_CODE (reg) == MEM | |
849 | && GET_CODE (XEXP (reg, 0)) == REG | |
850 | && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER) | |
851 | { | |
852 | reg = XEXP (reg, 0); | |
853 | decl_mode = promoted_mode = GET_MODE (reg); | |
854 | } | |
855 | ||
856 | if (GET_CODE (reg) != REG) | |
857 | return; | |
858 | ||
859 | if (function) | |
860 | { | |
861 | if (REGNO (reg) < function->max_parm_reg) | |
862 | new = function->parm_reg_stack_loc[REGNO (reg)]; | |
863 | if (new == 0) | |
864 | new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), | |
865 | 0, function); | |
866 | } | |
867 | else | |
868 | { | |
869 | if (REGNO (reg) < max_parm_reg) | |
870 | new = parm_reg_stack_loc[REGNO (reg)]; | |
871 | if (new == 0) | |
872 | new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0); | |
873 | } | |
874 | ||
875 | XEXP (reg, 0) = XEXP (new, 0); | |
876 | /* `volatil' bit means one thing for MEMs, another entirely for REGs. */ | |
877 | REG_USERVAR_P (reg) = 0; | |
878 | PUT_CODE (reg, MEM); | |
879 | PUT_MODE (reg, decl_mode); | |
880 | ||
881 | /* If this is a memory ref that contains aggregate components, | |
882 | mark it as such for cse and loop optimize. */ | |
883 | MEM_IN_STRUCT_P (reg) | |
884 | = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE | |
885 | || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE | |
886 | || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE | |
887 | || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE); | |
888 | ||
889 | /* Now make sure that all refs to the variable, previously made | |
890 | when it was a register, are fixed up to be valid again. */ | |
891 | if (function) | |
892 | { | |
893 | struct var_refs_queue *temp; | |
894 | ||
895 | /* Variable is inherited; fix it up when we get back to its function. */ | |
896 | push_obstacks (function->function_obstack, | |
897 | function->function_maybepermanent_obstack); | |
898 | temp | |
899 | = (struct var_refs_queue *) oballoc (sizeof (struct var_refs_queue)); | |
900 | temp->modified = reg; | |
901 | temp->promoted_mode = promoted_mode; | |
902 | temp->unsignedp = TREE_UNSIGNED (TREE_TYPE (decl)); | |
903 | temp->next = function->fixup_var_refs_queue; | |
904 | function->fixup_var_refs_queue = temp; | |
905 | pop_obstacks (); | |
906 | } | |
907 | else | |
908 | /* Variable is local; fix it up now. */ | |
909 | fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (TREE_TYPE (decl))); | |
910 | } | |
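
/* Illustrative only: a typical case that reaches put_var_into_stack is
   a local that was first given a pseudo register and only later turns
   out to need a home in memory, e.g. (GNU C nested functions)

       int
       outer ()
       {
         int x = 0;                     first lives in a pseudo
         int inner () { return x; }     nonlocal use forces X to the stack
         return inner () + x;
       }

   When the nonlocal reference is seen, the compiler calls
   put_var_into_stack on the VAR_DECL for X; the code above then turns
   X's rtl into a stack slot and arranges (directly, or via the
   fixup_var_refs_queue of the containing function) for the insns
   already emitted to be corrected by fixup_var_refs.  */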
911 | \f | |
912 | static void | |
913 | fixup_var_refs (var, promoted_mode, unsignedp) | |
914 | rtx var; | |
915 | enum machine_mode promoted_mode; | |
916 | int unsignedp; | |
917 | { | |
918 | tree pending; | |
919 | rtx first_insn = get_insns (); | |
920 | struct sequence_stack *stack = sequence_stack; | |
921 | tree rtl_exps = rtl_expr_chain; | |
922 | ||
923 | /* Must scan all insns for stack-refs that exceed the limit. */ | |
924 | fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0); | |
925 | ||
926 | /* Scan all pending sequences too. */ | |
927 | for (; stack; stack = stack->next) | |
928 | { | |
929 | push_to_sequence (stack->first); | |
930 | fixup_var_refs_insns (var, promoted_mode, unsignedp, | |
931 | stack->first, stack->next != 0); | |
932 | /* Update remembered end of sequence | |
933 | in case we added an insn at the end. */ | |
934 | stack->last = get_last_insn (); | |
935 | end_sequence (); | |
936 | } | |
937 | ||
938 | /* Scan all waiting RTL_EXPRs too. */ | |
939 | for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending)) | |
940 | { | |
941 | rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending)); | |
942 | if (seq != const0_rtx && seq != 0) | |
943 | { | |
944 | push_to_sequence (seq); | |
945 | fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0); | |
946 | end_sequence (); | |
947 | } | |
948 | } | |
949 | } | |
950 | \f | |
951 | /* This structure is used by the following two functions to record MEMs or | |
952 | pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing | |
953 | VAR as an address. We need to maintain this list in case two operands of | |
954 | an insn were required to match; in that case we must ensure we use the | |
955 | same replacement. */ | |
956 | ||
957 | struct fixup_replacement | |
958 | { | |
959 | rtx old; | |
960 | rtx new; | |
961 | struct fixup_replacement *next; | |
962 | }; | |
963 | ||
964 | /* REPLACEMENTS is a pointer to a list of the above structures and X is | |
965 | some part of an insn. Return a struct fixup_replacement whose OLD | |
966 | value is equal to X. Allocate a new structure if no such entry exists. */ | |
967 | ||
968 | static struct fixup_replacement * | |
969 | find_fixup_replacement (replacements, x) | |
970 | struct fixup_replacement **replacements; | |
971 | rtx x; | |
972 | { | |
973 | struct fixup_replacement *p; | |
974 | ||
975 | /* See if we have already replaced this. */ | |
976 | for (p = *replacements; p && p->old != x; p = p->next) | |
977 | ; | |
978 | ||
979 | if (p == 0) | |
980 | { | |
981 | p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement)); | |
982 | p->old = x; | |
983 | p->new = 0; | |
984 | p->next = *replacements; | |
985 | *replacements = p; | |
986 | } | |
987 | ||
988 | return p; | |
989 | } | |
990 | ||
991 | /* Scan the insn-chain starting with INSN for refs to VAR | |
992 | and fix them up. TOPLEVEL is nonzero if this chain is the | |
993 | main chain of insns for the current function. */ | |
994 | ||
995 | static void | |
996 | fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel) | |
997 | rtx var; | |
998 | enum machine_mode promoted_mode; | |
999 | int unsignedp; | |
1000 | rtx insn; | |
1001 | int toplevel; | |
1002 | { | |
1003 | rtx call_dest = 0; | |
1004 | ||
1005 | while (insn) | |
1006 | { | |
1007 | rtx next = NEXT_INSN (insn); | |
1008 | rtx note; | |
1009 | if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') | |
1010 | { | |
1011 | /* The insn to load VAR from a home in the arglist | |
1012 | is now a no-op. When we see it, just delete it. */ | |
1013 | if (toplevel | |
1014 | && GET_CODE (PATTERN (insn)) == SET | |
1015 | && SET_DEST (PATTERN (insn)) == var | |
1016 | /* If this represents the result of an insn group, | |
1017 | don't delete the insn. */ | |
1018 | && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0 | |
1019 | && rtx_equal_p (SET_SRC (PATTERN (insn)), var)) | |
1020 | { | |
1021 | /* In unoptimized compilation, we shouldn't call delete_insn | |
1022 | except in jump.c doing warnings. */ | |
1023 | PUT_CODE (insn, NOTE); | |
1024 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1025 | NOTE_SOURCE_FILE (insn) = 0; | |
1026 | if (insn == last_parm_insn) | |
1027 | last_parm_insn = PREV_INSN (next); | |
1028 | } | |
1029 | else | |
1030 | { | |
1031 | struct fixup_replacement *replacements = 0; | |
1032 | rtx next_insn = NEXT_INSN (insn); | |
1033 | ||
1034 | #ifdef SMALL_REGISTER_CLASSES | |
1035 | /* If the insn that copies the results of a CALL_INSN | |
1036 | into a pseudo now references VAR, we have to use an | |
1037 | intermediate pseudo since we want the life of the | |
1038 | return value register to be only a single insn. | |
1039 | ||
1040 | If we don't use an intermediate pseudo, such things as | |
1041 | address computations to make the address of VAR valid | |
1042 | if it is not can be placed between the CALL_INSN and INSN. | |
1043 | ||
1044 | To make sure this doesn't happen, we record the destination | |
1045 | of the CALL_INSN and see if the next insn uses both that | |
1046 | and VAR. */ | |
1047 | ||
1048 | if (call_dest != 0 && GET_CODE (insn) == INSN | |
1049 | && reg_mentioned_p (var, PATTERN (insn)) | |
1050 | && reg_mentioned_p (call_dest, PATTERN (insn))) | |
1051 | { | |
1052 | rtx temp = gen_reg_rtx (GET_MODE (call_dest)); | |
1053 | ||
1054 | emit_insn_before (gen_move_insn (temp, call_dest), insn); | |
1055 | ||
1056 | PATTERN (insn) = replace_rtx (PATTERN (insn), | |
1057 | call_dest, temp); | |
1058 | } | |
1059 | ||
1060 | if (GET_CODE (insn) == CALL_INSN | |
1061 | && GET_CODE (PATTERN (insn)) == SET) | |
1062 | call_dest = SET_DEST (PATTERN (insn)); | |
1063 | else if (GET_CODE (insn) == CALL_INSN | |
1064 | && GET_CODE (PATTERN (insn)) == PARALLEL | |
1065 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
1066 | call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); | |
1067 | else | |
1068 | call_dest = 0; | |
1069 | #endif | |
1070 | ||
1071 | /* See if we have to do anything to INSN now that VAR is in | |
1072 | memory. If it needs to be loaded into a pseudo, use a single | |
1073 | pseudo for the entire insn in case there is a MATCH_DUP | |
1074 | between two operands. We pass a pointer to the head of | |
1075 | a list of struct fixup_replacements. If fixup_var_refs_1 | |
1076 | needs to allocate pseudos or replacement MEMs (for SUBREGs), | |
1077 | it will record them in this list. | |
1078 | ||
1079 | If it allocated a pseudo for any replacement, we copy into | |
1080 | it here. */ | |
1081 | ||
1082 | fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn, | |
1083 | &replacements); | |
1084 | ||
1085 | /* If this is last_parm_insn, and any instructions were output | |
1086 | after it to fix it up, then we must set last_parm_insn to | |
1087 | the last such instruction emitted. */ | |
1088 | if (insn == last_parm_insn) | |
1089 | last_parm_insn = PREV_INSN (next_insn); | |
1090 | ||
1091 | while (replacements) | |
1092 | { | |
1093 | if (GET_CODE (replacements->new) == REG) | |
1094 | { | |
1095 | rtx insert_before; | |
1096 | rtx seq; | |
1097 | ||
1098 | /* OLD might be a (subreg (mem)). */ | |
1099 | if (GET_CODE (replacements->old) == SUBREG) | |
1100 | replacements->old | |
1101 | = fixup_memory_subreg (replacements->old, insn, 0); | |
1102 | else | |
1103 | replacements->old | |
1104 | = fixup_stack_1 (replacements->old, insn); | |
1105 | ||
1106 | /* We can not separate USE insns from the CALL_INSN | |
1107 | that they belong to. If this is a CALL_INSN, insert | |
1108 | the move insn before the USE insns preceding it | |
1109 | instead of immediately before the insn. */ | |
1110 | if (GET_CODE (insn) == CALL_INSN) | |
1111 | { | |
1112 | insert_before = insn; | |
1113 | while (GET_CODE (PREV_INSN (insert_before)) == INSN | |
1114 | && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE) | |
1115 | insert_before = PREV_INSN (insert_before); | |
1116 | } | |
1117 | else | |
1118 | insert_before = insn; | |
1119 | ||
1120 | /* If we are changing the mode, do a conversion. | |
1121 | This might be wasteful, but combine.c will | |
1122 | eliminate much of the waste. */ | |
1123 | ||
1124 | if (GET_MODE (replacements->new) | |
1125 | != GET_MODE (replacements->old)) | |
1126 | { | |
1127 | start_sequence (); | |
1128 | convert_move (replacements->new, | |
1129 | replacements->old, unsignedp); | |
1130 | seq = gen_sequence (); | |
1131 | end_sequence (); | |
1132 | } | |
1133 | else | |
1134 | seq = gen_move_insn (replacements->new, | |
1135 | replacements->old); | |
1136 | ||
1137 | emit_insn_before (seq, insert_before); | |
1138 | } | |
1139 | ||
1140 | replacements = replacements->next; | |
1141 | } | |
1142 | } | |
1143 | ||
1144 | /* Also fix up any invalid exprs in the REG_NOTES of this insn. | |
1145 | But don't touch other insns referred to by reg-notes; | |
1146 | we will get them elsewhere. */ | |
1147 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
1148 | if (GET_CODE (note) != INSN_LIST) | |
1149 | XEXP (note, 0) | |
1150 | = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1); | |
1151 | } | |
1152 | insn = next; | |
1153 | } | |
1154 | } | |
1155 | \f | |
1156 | /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE. | |
1157 | See if the rtx expression at *LOC in INSN needs to be changed. | |
1158 | ||
1159 | REPLACEMENTS is a pointer to a list head that starts out zero, but may | |
1160 | contain a list of original rtx's and replacements. If we find that we need | |
1161 | to modify this insn by replacing a memory reference with a pseudo or by | |
1162 | making a new MEM to implement a SUBREG, we consult that list to see if | |
1163 | we have already chosen a replacement. If none has already been allocated, | |
1164 | we allocate it and update the list. fixup_var_refs_insns will copy VAR | |
1165 | or the SUBREG, as appropriate, to the pseudo. */ | |
1166 | ||
1167 | static void | |
1168 | fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements) | |
1169 | register rtx var; | |
1170 | enum machine_mode promoted_mode; | |
1171 | register rtx *loc; | |
1172 | rtx insn; | |
1173 | struct fixup_replacement **replacements; | |
1174 | { | |
1175 | register int i; | |
1176 | register rtx x = *loc; | |
1177 | RTX_CODE code = GET_CODE (x); | |
1178 | register char *fmt; | |
1179 | register rtx tem, tem1; | |
1180 | struct fixup_replacement *replacement; | |
1181 | ||
1182 | switch (code) | |
1183 | { | |
1184 | case MEM: | |
1185 | if (var == x) | |
1186 | { | |
1187 | /* If we already have a replacement, use it. Otherwise, | |
1188 | try to fix up this address in case it is invalid. */ | |
1189 | ||
1190 | replacement = find_fixup_replacement (replacements, var); | |
1191 | if (replacement->new) | |
1192 | { | |
1193 | *loc = replacement->new; | |
1194 | return; | |
1195 | } | |
1196 | ||
1197 | *loc = replacement->new = x = fixup_stack_1 (x, insn); | |
1198 | ||
1199 | /* Unless we are forcing memory to register or we changed the mode, | |
1200 | we can leave things the way they are if the insn is valid. */ | |
1201 | ||
1202 | INSN_CODE (insn) = -1; | |
1203 | if (! flag_force_mem && GET_MODE (x) == promoted_mode | |
1204 | && recog_memoized (insn) >= 0) | |
1205 | return; | |
1206 | ||
1207 | *loc = replacement->new = gen_reg_rtx (promoted_mode); | |
1208 | return; | |
1209 | } | |
1210 | ||
1211 | /* If X contains VAR, we need to unshare it here so that we update | |
1212 | each occurrence separately. But all identical MEMs in one insn | |
1213 | must be replaced with the same rtx because of the possibility of | |
1214 | MATCH_DUPs. */ | |
1215 | ||
1216 | if (reg_mentioned_p (var, x)) | |
1217 | { | |
1218 | replacement = find_fixup_replacement (replacements, x); | |
1219 | if (replacement->new == 0) | |
1220 | replacement->new = copy_most_rtx (x, var); | |
1221 | ||
1222 | *loc = x = replacement->new; | |
1223 | } | |
1224 | break; | |
1225 | ||
1226 | case REG: | |
1227 | case CC0: | |
1228 | case PC: | |
1229 | case CONST_INT: | |
1230 | case CONST: | |
1231 | case SYMBOL_REF: | |
1232 | case LABEL_REF: | |
1233 | case CONST_DOUBLE: | |
1234 | return; | |
1235 | ||
1236 | case SIGN_EXTRACT: | |
1237 | case ZERO_EXTRACT: | |
1238 | /* Note that in some cases those types of expressions are altered | |
1239 | by optimize_bit_field, and do not survive to get here. */ | |
1240 | if (XEXP (x, 0) == var | |
1241 | || (GET_CODE (XEXP (x, 0)) == SUBREG | |
1242 | && SUBREG_REG (XEXP (x, 0)) == var)) | |
1243 | { | |
1244 | /* Get TEM as a valid MEM in the mode presently in the insn. | |
1245 | ||
1246 | We don't worry about the possibility of MATCH_DUP here; it | |
1247 | is highly unlikely and would be tricky to handle. */ | |
1248 | ||
1249 | tem = XEXP (x, 0); | |
1250 | if (GET_CODE (tem) == SUBREG) | |
1251 | tem = fixup_memory_subreg (tem, insn, 1); | |
1252 | tem = fixup_stack_1 (tem, insn); | |
1253 | ||
1254 | /* Unless we want to load from memory, get TEM into the proper mode | |
1255 | for an extract from memory. This can only be done if the | |
1256 | extract is at a constant position and length. */ | |
1257 | ||
1258 | if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT | |
1259 | && GET_CODE (XEXP (x, 2)) == CONST_INT | |
1260 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
1261 | && ! MEM_VOLATILE_P (tem)) | |
1262 | { | |
1263 | enum machine_mode wanted_mode = VOIDmode; | |
1264 | enum machine_mode is_mode = GET_MODE (tem); | |
1265 | int width = INTVAL (XEXP (x, 1)); | |
1266 | int pos = INTVAL (XEXP (x, 2)); | |
1267 | ||
1268 | #ifdef HAVE_extzv | |
1269 | if (GET_CODE (x) == ZERO_EXTRACT) | |
1270 | wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1]; | |
1271 | #endif | |
1272 | #ifdef HAVE_extv | |
1273 | if (GET_CODE (x) == SIGN_EXTRACT) | |
1274 | wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1]; | |
1275 | #endif | |
1276 | /* If we have a narrower mode, we can do something. */ | |
1277 | if (wanted_mode != VOIDmode | |
1278 | && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
1279 | { | |
1280 | int offset = pos / BITS_PER_UNIT; | |
1281 | rtx old_pos = XEXP (x, 2); | |
1282 | rtx newmem; | |
1283 | ||
1284 | /* If the bytes and bits are counted differently, we | |
1285 | must adjust the offset. */ | |
1286 | #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN | |
1287 | offset = (GET_MODE_SIZE (is_mode) | |
1288 | - GET_MODE_SIZE (wanted_mode) - offset); | |
1289 | #endif | |
1290 | ||
1291 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
1292 | ||
1293 | newmem = gen_rtx (MEM, wanted_mode, | |
1294 | plus_constant (XEXP (tem, 0), offset)); | |
1295 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); | |
1296 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); | |
1297 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); | |
1298 | ||
1299 | /* Make the change and see if the insn remains valid. */ | |
1300 | INSN_CODE (insn) = -1; | |
1301 | XEXP (x, 0) = newmem; | |
1302 | XEXP (x, 2) = GEN_INT (pos); | |
1303 | ||
1304 | if (recog_memoized (insn) >= 0) | |
1305 | return; | |
1306 | ||
1307 | /* Otherwise, restore old position. XEXP (x, 0) will be | |
1308 | restored later. */ | |
1309 | XEXP (x, 2) = old_pos; | |
1310 | } | |
1311 | } | |
1312 | ||
1313 | /* If we get here, the bitfield extract insn can't accept a memory | |
1314 | reference. Copy the input into a register. */ | |
1315 | ||
1316 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
1317 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
1318 | XEXP (x, 0) = tem1; | |
1319 | return; | |
1320 | } | |
1321 | break; | |
1322 | ||
1323 | case SUBREG: | |
1324 | if (SUBREG_REG (x) == var) | |
1325 | { | |
1326 | /* If this is a special SUBREG made because VAR was promoted | |
1327 | from a wider mode, replace it with VAR and call ourself | |
1328 | recursively, this time saying that the object previously | |
1329 | had its current mode (by virtue of the SUBREG). */ | |
1330 | ||
1331 | if (SUBREG_PROMOTED_VAR_P (x)) | |
1332 | { | |
1333 | *loc = var; | |
1334 | fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements); | |
1335 | return; | |
1336 | } | |
1337 | ||
1338 | /* If this SUBREG makes VAR wider, it has become a paradoxical | |
1339 | SUBREG with VAR in memory, but these aren't allowed at this | |
1340 | stage of the compilation. So load VAR into a pseudo and take | |
1341 | a SUBREG of that pseudo. */ | |
1342 | if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var))) | |
1343 | { | |
1344 | replacement = find_fixup_replacement (replacements, var); | |
1345 | if (replacement->new == 0) | |
1346 | replacement->new = gen_reg_rtx (GET_MODE (var)); | |
1347 | SUBREG_REG (x) = replacement->new; | |
1348 | return; | |
1349 | } | |
1350 | ||
1351 | /* See if we have already found a replacement for this SUBREG. | |
1352 | If so, use it. Otherwise, make a MEM and see if the insn | |
1353 | is recognized. If not, or if we should force MEM into a register, | |
1354 | make a pseudo for this SUBREG. */ | |
1355 | replacement = find_fixup_replacement (replacements, x); | |
1356 | if (replacement->new) | |
1357 | { | |
1358 | *loc = replacement->new; | |
1359 | return; | |
1360 | } | |
1361 | ||
1362 | replacement->new = *loc = fixup_memory_subreg (x, insn, 0); | |
1363 | ||
1364 | INSN_CODE (insn) = -1; | |
1365 | if (! flag_force_mem && recog_memoized (insn) >= 0) | |
1366 | return; | |
1367 | ||
1368 | *loc = replacement->new = gen_reg_rtx (GET_MODE (x)); | |
1369 | return; | |
1370 | } | |
1371 | break; | |
1372 | ||
1373 | case SET: | |
1374 | /* First do special simplification of bit-field references. */ | |
1375 | if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT | |
1376 | || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT) | |
1377 | optimize_bit_field (x, insn, 0); | |
1378 | if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT | |
1379 | || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT) | |
1380 | optimize_bit_field (x, insn, NULL_PTR); | |
1381 | ||
1382 | /* If SET_DEST is now a paradoxical SUBREG, put the result of this | |
1383 | insn into a pseudo and store the low part of the pseudo into VAR. */ | |
1384 | if (GET_CODE (SET_DEST (x)) == SUBREG | |
1385 | && SUBREG_REG (SET_DEST (x)) == var | |
1386 | && (GET_MODE_SIZE (GET_MODE (SET_DEST (x))) | |
1387 | > GET_MODE_SIZE (GET_MODE (var)))) | |
1388 | { | |
1389 | SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x))); | |
1390 | emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var), | |
1391 | tem)), | |
1392 | insn); | |
1393 | break; | |
1394 | } | |
1395 | ||
1396 | { | |
1397 | rtx dest = SET_DEST (x); | |
1398 | rtx src = SET_SRC (x); | |
1399 | rtx outerdest = dest; | |
1400 | ||
1401 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART | |
1402 | || GET_CODE (dest) == SIGN_EXTRACT | |
1403 | || GET_CODE (dest) == ZERO_EXTRACT) | |
1404 | dest = XEXP (dest, 0); | |
1405 | ||
1406 | if (GET_CODE (src) == SUBREG) | |
1407 | src = XEXP (src, 0); | |
1408 | ||
1409 | /* If VAR does not appear at the top level of the SET | |
1410 | just scan the lower levels of the tree. */ | |
1411 | ||
1412 | if (src != var && dest != var) | |
1413 | break; | |
1414 | ||
1415 | /* We will need to rerecognize this insn. */ | |
1416 | INSN_CODE (insn) = -1; | |
1417 | ||
1418 | #ifdef HAVE_insv | |
1419 | if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var) | |
1420 | { | |
1421 | /* Since this case will return, ensure we fixup all the | |
1422 | operands here. */ | |
1423 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1), | |
1424 | insn, replacements); | |
1425 | fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2), | |
1426 | insn, replacements); | |
1427 | fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x), | |
1428 | insn, replacements); | |
1429 | ||
1430 | tem = XEXP (outerdest, 0); | |
1431 | ||
1432 | /* Clean up (SUBREG:SI (MEM:mode ...) 0) | |
1433 | that may appear inside a ZERO_EXTRACT. | |
1434 | This was legitimate when the MEM was a REG. */ | |
1435 | if (GET_CODE (tem) == SUBREG | |
1436 | && SUBREG_REG (tem) == var) | |
1437 | tem = fixup_memory_subreg (tem, insn, 1); | |
1438 | else | |
1439 | tem = fixup_stack_1 (tem, insn); | |
1440 | ||
1441 | if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT | |
1442 | && GET_CODE (XEXP (outerdest, 2)) == CONST_INT | |
1443 | && ! mode_dependent_address_p (XEXP (tem, 0)) | |
1444 | && ! MEM_VOLATILE_P (tem)) | |
1445 | { | |
1446 | enum machine_mode wanted_mode | |
1447 | = insn_operand_mode[(int) CODE_FOR_insv][0]; | |
1448 | enum machine_mode is_mode = GET_MODE (tem); | |
1449 | int width = INTVAL (XEXP (outerdest, 1)); | |
1450 | int pos = INTVAL (XEXP (outerdest, 2)); | |
1451 | ||
1452 | /* If we have a narrower mode, we can do something. */ | |
1453 | if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) | |
1454 | { | |
1455 | int offset = pos / BITS_PER_UNIT; | |
1456 | rtx old_pos = XEXP (outerdest, 2); | |
1457 | rtx newmem; | |
1458 | ||
1459 | #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN | |
1460 | offset = (GET_MODE_SIZE (is_mode) | |
1461 | - GET_MODE_SIZE (wanted_mode) - offset); | |
1462 | #endif | |
1463 | ||
1464 | pos %= GET_MODE_BITSIZE (wanted_mode); | |
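/* For example, if IS_MODE is SImode, the insv pattern wants QImode, and
   POS is 17 on a little-endian target (so the #if above does not apply),
   then OFFSET becomes 2 and POS becomes 1: bit 17 of the word is bit 1
   of its byte 2.  */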
1465 | ||
1466 | newmem = gen_rtx (MEM, wanted_mode, | |
1467 | plus_constant (XEXP (tem, 0), offset)); | |
1468 | RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); | |
1469 | MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); | |
1470 | MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); | |
1471 | ||
1472 | /* Make the change and see if the insn remains valid. */ | |
1473 | INSN_CODE (insn) = -1; | |
1474 | XEXP (outerdest, 0) = newmem; | |
1475 | XEXP (outerdest, 2) = GEN_INT (pos); | |
1476 | ||
1477 | if (recog_memoized (insn) >= 0) | |
1478 | return; | |
1479 | ||
1480 | /* Otherwise, restore old position. XEXP (x, 0) will be | |
1481 | restored later. */ | |
1482 | XEXP (outerdest, 2) = old_pos; | |
1483 | } | |
1484 | } | |
1485 | ||
1486 | /* If we get here, the bit-field store doesn't allow memory | |
1487 | or isn't located at a constant position. Load the value into | |
1488 | a register, do the store, and put it back into memory. */ | |
1489 | ||
1490 | tem1 = gen_reg_rtx (GET_MODE (tem)); | |
1491 | emit_insn_before (gen_move_insn (tem1, tem), insn); | |
1492 | emit_insn_after (gen_move_insn (tem, tem1), insn); | |
1493 | XEXP (outerdest, 0) = tem1; | |
1494 | return; | |
1495 | } | |
1496 | #endif | |
1497 | ||
1498 | /* STRICT_LOW_PART is a no-op on memory references | |
1499 | and it can cause combinations to be unrecognizable, | |
1500 | so eliminate it. */ | |
1501 | ||
1502 | if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART) | |
1503 | SET_DEST (x) = XEXP (SET_DEST (x), 0); | |
1504 | ||
1505 | /* A valid insn to copy VAR into or out of a register | |
1506 | must be left alone, to avoid an infinite loop here. | |
1507 | If the reference to VAR is by a subreg, fix that up, | |
1508 | since SUBREG is not valid for a memref. | |
1509 | Also fix up the address of the stack slot. | |
1510 | ||
1511 | Note that we must not try to recognize the insn until | |
1512 | after we know that we have valid addresses and no | |
1513 | (subreg (mem ...) ...) constructs, since these interfere | |
1514 | with determining the validity of the insn. */ | |
1515 | ||
1516 | if ((SET_SRC (x) == var | |
1517 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
1518 | && SUBREG_REG (SET_SRC (x)) == var)) | |
1519 | && (GET_CODE (SET_DEST (x)) == REG | |
1520 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
1521 | && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)) | |
1522 | && x == single_set (PATTERN (insn))) | |
1523 | { | |
1524 | rtx pat; | |
1525 | ||
1526 | replacement = find_fixup_replacement (replacements, SET_SRC (x)); | |
1527 | if (replacement->new) | |
1528 | SET_SRC (x) = replacement->new; | |
1529 | else if (GET_CODE (SET_SRC (x)) == SUBREG) | |
1530 | SET_SRC (x) = replacement->new | |
1531 | = fixup_memory_subreg (SET_SRC (x), insn, 0); | |
1532 | else | |
1533 | SET_SRC (x) = replacement->new | |
1534 | = fixup_stack_1 (SET_SRC (x), insn); | |
1535 | ||
1536 | if (recog_memoized (insn) >= 0) | |
1537 | return; | |
1538 | ||
1539 | /* INSN is not valid, but we know that we want to | |
1540 | copy SET_SRC (x) to SET_DEST (x) in some way. So | |
1541 | we generate the move and see whether it requires more | |
1542 | than one insn. If it does, we emit those insns and | |
1543 | delete INSN. Otherwise, we can just replace the pattern | |
1544 | of INSN; we have already verified above that INSN has | |
1545 | no other function than to do X. */ | |
1546 | ||
1547 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
1548 | if (GET_CODE (pat) == SEQUENCE) | |
1549 | { | |
1550 | emit_insn_after (pat, insn); | |
1551 | PUT_CODE (insn, NOTE); | |
1552 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1553 | NOTE_SOURCE_FILE (insn) = 0; | |
1554 | } | |
1555 | else | |
1556 | PATTERN (insn) = pat; | |
1557 | ||
1558 | return; | |
1559 | } | |
1560 | ||
1561 | if ((SET_DEST (x) == var | |
1562 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
1563 | && SUBREG_REG (SET_DEST (x)) == var)) | |
1564 | && (GET_CODE (SET_SRC (x)) == REG | |
1565 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
1566 | && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG)) | |
1567 | && x == single_set (PATTERN (insn))) | |
1568 | { | |
1569 | rtx pat; | |
1570 | ||
1571 | if (GET_CODE (SET_DEST (x)) == SUBREG) | |
1572 | SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0); | |
1573 | else | |
1574 | SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn); | |
1575 | ||
1576 | if (recog_memoized (insn) >= 0) | |
1577 | return; | |
1578 | ||
1579 | pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); | |
1580 | if (GET_CODE (pat) == SEQUENCE) | |
1581 | { | |
1582 | emit_insn_after (pat, insn); | |
1583 | PUT_CODE (insn, NOTE); | |
1584 | NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; | |
1585 | NOTE_SOURCE_FILE (insn) = 0; | |
1586 | } | |
1587 | else | |
1588 | PATTERN (insn) = pat; | |
1589 | ||
1590 | return; | |
1591 | } | |
1592 | ||
1593 | /* Otherwise, storing into VAR must be handled specially | |
1594 | by storing into a temporary and copying that into VAR | |
1595 | with a new insn after this one. Note that this case | |
1596 | will be used when storing into a promoted scalar since | |
1597 | the insn will now have different modes on the input | |
1598 | and output and hence will be invalid (except for the case | |
1599 | of setting it to a constant, which does not need any | |
1600 | change if it is valid). We generate extra code in that case, | |
1601 | but combine.c will eliminate it. */ | |
1602 | ||
1603 | if (dest == var) | |
1604 | { | |
1605 | rtx temp; | |
1606 | rtx fixeddest = SET_DEST (x); | |
1607 | ||
1608 | /* STRICT_LOW_PART can be discarded when it is around a MEM. */ | |
1609 | if (GET_CODE (fixeddest) == STRICT_LOW_PART) | |
1610 | fixeddest = XEXP (fixeddest, 0); | |
1611 | /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */ | |
1612 | if (GET_CODE (fixeddest) == SUBREG) | |
1613 | fixeddest = fixup_memory_subreg (fixeddest, insn, 0); | |
1614 | else | |
1615 | fixeddest = fixup_stack_1 (fixeddest, insn); | |
1616 | ||
1617 | temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode | |
1618 | ? GET_MODE (fixeddest) | |
1619 | : GET_MODE (SET_SRC (x))); | |
1620 | ||
1621 | emit_insn_after (gen_move_insn (fixeddest, | |
1622 | gen_lowpart (GET_MODE (fixeddest), | |
1623 | temp)), | |
1624 | insn); | |
1625 | ||
1626 | SET_DEST (x) = temp; | |
1627 | } | |
1628 | } | |
1629 | } | |
1630 | ||
1631 | /* Nothing special about this RTX; fix its operands. */ | |
1632 | ||
1633 | fmt = GET_RTX_FORMAT (code); | |
1634 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1635 | { | |
1636 | if (fmt[i] == 'e') | |
1637 | fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements); | |
1638 | if (fmt[i] == 'E') | |
1639 | { | |
1640 | register int j; | |
1641 | for (j = 0; j < XVECLEN (x, i); j++) | |
1642 | fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j), | |
1643 | insn, replacements); | |
1644 | } | |
1645 | } | |
1646 | } | |
1647 | \f | |
1648 | /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)), | |
1649 | return an rtx (MEM:m1 newaddr) which is equivalent. | |
1650 | If any insns must be emitted to compute NEWADDR, put them before INSN. | |
1651 | ||
1652 | UNCRITICAL nonzero means accept paradoxical subregs. | |
1653 | This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */ | |
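/* For example, on a little-endian target with 4-byte words,
   (subreg:SI (mem:DI addr) 1) is rewritten as (mem:SI (plus addr 4)),
   with any address-legitimizing insns emitted before INSN.  */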
1654 | ||
1655 | static rtx | |
1656 | fixup_memory_subreg (x, insn, uncritical) | |
1657 | rtx x; | |
1658 | rtx insn; | |
1659 | int uncritical; | |
1660 | { | |
1661 | int offset = SUBREG_WORD (x) * UNITS_PER_WORD; | |
1662 | rtx addr = XEXP (SUBREG_REG (x), 0); | |
1663 | enum machine_mode mode = GET_MODE (x); | |
1664 | rtx saved, result; | |
1665 | ||
1666 | /* Paradoxical SUBREGs are usually invalid during RTL generation. */ | |
1667 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) | |
1668 | && ! uncritical) | |
1669 | abort (); | |
1670 | ||
1671 | #if BYTES_BIG_ENDIAN | |
1672 | offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
1673 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))); | |
1674 | #endif | |
1675 | addr = plus_constant (addr, offset); | |
1676 | if (!flag_force_addr && memory_address_p (mode, addr)) | |
1677 | /* Shortcut if no insns need be emitted. */ | |
1678 | return change_address (SUBREG_REG (x), mode, addr); | |
1679 | start_sequence (); | |
1680 | result = change_address (SUBREG_REG (x), mode, addr); | |
1681 | emit_insn_before (gen_sequence (), insn); | |
1682 | end_sequence (); | |
1683 | return result; | |
1684 | } | |
1685 | ||
1686 | /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X. | |
1687 | Replace subexpressions of X in place. | |
1688 | If X itself is a (SUBREG (MEM ...) ...), return the replacement expression. | |
1689 | Otherwise return X, with its contents possibly altered. | |
1690 | ||
1691 | If any insns must be emitted to compute NEWADDR, put them before INSN. | |
1692 | ||
1693 | UNCRITICAL is as in fixup_memory_subreg. */ | |
1694 | ||
1695 | static rtx | |
1696 | walk_fixup_memory_subreg (x, insn, uncritical) | |
1697 | register rtx x; | |
1698 | rtx insn; | |
1699 | int uncritical; | |
1700 | { | |
1701 | register enum rtx_code code; | |
1702 | register char *fmt; | |
1703 | register int i; | |
1704 | ||
1705 | if (x == 0) | |
1706 | return 0; | |
1707 | ||
1708 | code = GET_CODE (x); | |
1709 | ||
1710 | if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
1711 | return fixup_memory_subreg (x, insn, uncritical); | |
1712 | ||
1713 | /* Nothing special about this RTX; fix its operands. */ | |
1714 | ||
1715 | fmt = GET_RTX_FORMAT (code); | |
1716 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1717 | { | |
1718 | if (fmt[i] == 'e') | |
1719 | XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical); | |
1720 | if (fmt[i] == 'E') | |
1721 | { | |
1722 | register int j; | |
1723 | for (j = 0; j < XVECLEN (x, i); j++) | |
1724 | XVECEXP (x, i, j) | |
1725 | = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical); | |
1726 | } | |
1727 | } | |
1728 | return x; | |
1729 | } | |
1730 | \f | |
1731 | #if 0 | |
1732 | /* Fix up any references to stack slots that are invalid memory addresses | |
1733 | because they exceed the maximum range of a displacement. */ | |
1734 | ||
1735 | void | |
1736 | fixup_stack_slots () | |
1737 | { | |
1738 | register rtx insn; | |
1739 | ||
1740 | /* Did we generate a stack slot that is out of range | |
1741 | or otherwise has an invalid address? */ | |
1742 | if (invalid_stack_slot) | |
1743 | { | |
1744 | /* Yes. Must scan all insns for stack-refs that exceed the limit. */ | |
1745 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
1746 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN | |
1747 | || GET_CODE (insn) == JUMP_INSN) | |
1748 | fixup_stack_1 (PATTERN (insn), insn); | |
1749 | } | |
1750 | } | |
1751 | #endif | |
1752 | ||
1753 | /* For each memory ref within X, if it refers to a stack slot | |
1754 | with an out of range displacement, put the address in a temp register | |
1755 | (emitting new insns before INSN to load these registers) | |
1756 | and alter the memory ref to use that register. | |
1757 | Replace each such MEM rtx with a copy, to avoid clobberage. */ | |
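/* For example, on a machine whose addressing modes allow only small
   displacements, (mem:SI (plus (reg virtual-stack-vars) (const_int 5000)))
   might be invalid; the PLUS is then loaded into a pseudo by insns emitted
   before INSN, and the MEM is rewritten to address through that pseudo.  */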
1758 | ||
1759 | static rtx | |
1760 | fixup_stack_1 (x, insn) | |
1761 | rtx x; | |
1762 | rtx insn; | |
1763 | { | |
1764 | register int i; | |
1765 | register RTX_CODE code = GET_CODE (x); | |
1766 | register char *fmt; | |
1767 | ||
1768 | if (code == MEM) | |
1769 | { | |
1770 | register rtx ad = XEXP (x, 0); | |
1771 | /* If we have address of a stack slot but it's not valid | |
1772 | (displacement is too large), compute the sum in a register. */ | |
1773 | if (GET_CODE (ad) == PLUS | |
1774 | && GET_CODE (XEXP (ad, 0)) == REG | |
1775 | && REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER | |
1776 | && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER | |
1777 | && GET_CODE (XEXP (ad, 1)) == CONST_INT) | |
1778 | { | |
1779 | rtx temp, seq; | |
1780 | if (memory_address_p (GET_MODE (x), ad)) | |
1781 | return x; | |
1782 | ||
1783 | start_sequence (); | |
1784 | temp = copy_to_reg (ad); | |
1785 | seq = gen_sequence (); | |
1786 | end_sequence (); | |
1787 | emit_insn_before (seq, insn); | |
1788 | return change_address (x, VOIDmode, temp); | |
1789 | } | |
1790 | return x; | |
1791 | } | |
1792 | ||
1793 | fmt = GET_RTX_FORMAT (code); | |
1794 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1795 | { | |
1796 | if (fmt[i] == 'e') | |
1797 | XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn); | |
1798 | if (fmt[i] == 'E') | |
1799 | { | |
1800 | register int j; | |
1801 | for (j = 0; j < XVECLEN (x, i); j++) | |
1802 | XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn); | |
1803 | } | |
1804 | } | |
1805 | return x; | |
1806 | } | |
1807 | \f | |
1808 | /* Optimization: a bit-field instruction whose field | |
1809 | happens to be a byte or halfword in memory | |
1810 | can be changed to a move instruction. | |
1811 | ||
1812 | We call here when INSN is an insn to examine or store into a bit-field. | |
1813 | BODY is the SET-rtx to be altered. | |
1814 | ||
1815 | EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0. | |
1816 | (Currently this is called only from function.c, and EQUIV_MEM | |
1817 | is always 0.) */ | |
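/* For example, on a little-endian target an insn such as
     (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8)) (reg:SI r))
   can usually be rewritten as a plain byte store,
     (set (mem:QI (plus addr 1)) (subreg:QI (reg:SI r) 0)),
   provided the resulting insn is still recognizable.  */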
1818 | ||
1819 | static void | |
1820 | optimize_bit_field (body, insn, equiv_mem) | |
1821 | rtx body; | |
1822 | rtx insn; | |
1823 | rtx *equiv_mem; | |
1824 | { | |
1825 | register rtx bitfield; | |
1826 | int destflag; | |
1827 | rtx seq = 0; | |
1828 | enum machine_mode mode; | |
1829 | ||
1830 | if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT | |
1831 | || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT) | |
1832 | bitfield = SET_DEST (body), destflag = 1; | |
1833 | else | |
1834 | bitfield = SET_SRC (body), destflag = 0; | |
1835 | ||
1836 | /* First check that the field being stored has constant size and position | |
1837 | and is in fact a byte or halfword suitably aligned. */ | |
1838 | ||
1839 | if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT | |
1840 | && GET_CODE (XEXP (bitfield, 2)) == CONST_INT | |
1841 | && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1)) | |
1842 | != BLKmode) | |
1843 | && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0) | |
1844 | { | |
1845 | register rtx memref = 0; | |
1846 | ||
1847 | /* Now check that the containing word is memory, not a register, | |
1848 | and that it is safe to change the machine mode. */ | |
1849 | ||
1850 | if (GET_CODE (XEXP (bitfield, 0)) == MEM) | |
1851 | memref = XEXP (bitfield, 0); | |
1852 | else if (GET_CODE (XEXP (bitfield, 0)) == REG | |
1853 | && equiv_mem != 0) | |
1854 | memref = equiv_mem[REGNO (XEXP (bitfield, 0))]; | |
1855 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
1856 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM) | |
1857 | memref = SUBREG_REG (XEXP (bitfield, 0)); | |
1858 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
1859 | && equiv_mem != 0 | |
1860 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG) | |
1861 | memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))]; | |
1862 | ||
1863 | if (memref | |
1864 | && ! mode_dependent_address_p (XEXP (memref, 0)) | |
1865 | && ! MEM_VOLATILE_P (memref)) | |
1866 | { | |
1867 | /* Now adjust the address, first for any subreg'ing | |
1868 | that we are now getting rid of, | |
1869 | and then for which byte of the word is wanted. */ | |
1870 | ||
1871 | register int offset = INTVAL (XEXP (bitfield, 2)); | |
1872 | /* Adjust OFFSET to count bits from low-address byte. */ | |
1873 | #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN | |
1874 | offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0))) | |
1875 | - offset - INTVAL (XEXP (bitfield, 1))); | |
1876 | #endif | |
1877 | /* Adjust OFFSET to count bytes from low-address byte. */ | |
1878 | offset /= BITS_PER_UNIT; | |
1879 | if (GET_CODE (XEXP (bitfield, 0)) == SUBREG) | |
1880 | { | |
1881 | offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD; | |
1882 | #if BYTES_BIG_ENDIAN | |
1883 | offset -= (MIN (UNITS_PER_WORD, | |
1884 | GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0)))) | |
1885 | - MIN (UNITS_PER_WORD, | |
1886 | GET_MODE_SIZE (GET_MODE (memref)))); | |
1887 | #endif | |
1888 | } | |
1889 | ||
1890 | memref = change_address (memref, mode, | |
1891 | plus_constant (XEXP (memref, 0), offset)); | |
1892 | ||
1893 | /* Store this memory reference where | |
1894 | we found the bit field reference. */ | |
1895 | ||
1896 | if (destflag) | |
1897 | { | |
1898 | validate_change (insn, &SET_DEST (body), memref, 1); | |
1899 | if (! CONSTANT_ADDRESS_P (SET_SRC (body))) | |
1900 | { | |
1901 | rtx src = SET_SRC (body); | |
1902 | while (GET_CODE (src) == SUBREG | |
1903 | && SUBREG_WORD (src) == 0) | |
1904 | src = SUBREG_REG (src); | |
1905 | if (GET_MODE (src) != GET_MODE (memref)) | |
1906 | src = gen_lowpart (GET_MODE (memref), SET_SRC (body)); | |
1907 | validate_change (insn, &SET_SRC (body), src, 1); | |
1908 | } | |
1909 | else if (GET_MODE (SET_SRC (body)) != VOIDmode | |
1910 | && GET_MODE (SET_SRC (body)) != GET_MODE (memref)) | |
1911 | /* This shouldn't happen because anything that didn't have | |
1912 | one of these modes should have got converted explicitly | |
1913 | and then referenced through a subreg. | |
1914 | This is so because the original bit-field was | |
1915 | handled by agg_mode and so its tree structure had | |
1916 | the same mode that memref now has. */ | |
1917 | abort (); | |
1918 | } | |
1919 | else | |
1920 | { | |
1921 | rtx dest = SET_DEST (body); | |
1922 | ||
1923 | while (GET_CODE (dest) == SUBREG | |
1924 | && SUBREG_WORD (dest) == 0) | |
1925 | dest = SUBREG_REG (dest); | |
1926 | ||
1927 | validate_change (insn, &SET_DEST (body), dest, 1); | |
1928 | ||
1929 | if (GET_MODE (dest) == GET_MODE (memref)) | |
1930 | validate_change (insn, &SET_SRC (body), memref, 1); | |
1931 | else | |
1932 | { | |
1933 | /* Convert the mem ref to the destination mode. */ | |
1934 | rtx newreg = gen_reg_rtx (GET_MODE (dest)); | |
1935 | ||
1936 | start_sequence (); | |
1937 | convert_move (newreg, memref, | |
1938 | GET_CODE (SET_SRC (body)) == ZERO_EXTRACT); | |
1939 | seq = get_insns (); | |
1940 | end_sequence (); | |
1941 | ||
1942 | validate_change (insn, &SET_SRC (body), newreg, 1); | |
1943 | } | |
1944 | } | |
1945 | ||
1946 | /* See if we can convert this extraction or insertion into | |
1947 | a simple move insn. We might not be able to do so if this | |
1948 | was, for example, part of a PARALLEL. | |
1949 | ||
1950 | If we succeed, write out any needed conversions. If we fail, | |
1951 | it is hard to guess why we failed, so don't do anything | |
1952 | special; just let the optimization be suppressed. */ | |
1953 | ||
1954 | if (apply_change_group () && seq) | |
1955 | emit_insns_before (seq, insn); | |
1956 | } | |
1957 | } | |
1958 | } | |
1959 | \f | |
1960 | /* These routines are responsible for converting virtual register references | |
1961 | to the actual hard register references once RTL generation is complete. | |
1962 | ||
1963 | The following four variables are used for communication between the | |
1964 | routines. They contain the offsets of the virtual registers from their | |
1965 | respective hard registers. */ | |
1966 | ||
1967 | static int in_arg_offset; | |
1968 | static int var_offset; | |
1969 | static int dynamic_offset; | |
1970 | static int out_arg_offset; | |
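/* That is, once instantiate_virtual_regs has computed these offsets,
   virtual_incoming_args_rtx maps to the arg pointer plus in_arg_offset,
   virtual_stack_vars_rtx to the frame pointer plus var_offset,
   virtual_stack_dynamic_rtx to the stack pointer plus dynamic_offset,
   and virtual_outgoing_args_rtx to the stack pointer plus out_arg_offset.  */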
1971 | ||
1972 | /* In most machines, the stack pointer register is equivalent to the bottom | |
1973 | of the stack. */ | |
1974 | ||
1975 | #ifndef STACK_POINTER_OFFSET | |
1976 | #define STACK_POINTER_OFFSET 0 | |
1977 | #endif | |
1978 | ||
1979 | /* If not defined, pick an appropriate default for the offset of dynamically | |
1980 | allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, | |
1981 | REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ | |
1982 | ||
1983 | #ifndef STACK_DYNAMIC_OFFSET | |
1984 | ||
1985 | #ifdef ACCUMULATE_OUTGOING_ARGS | |
1986 | /* The bottom of the stack points to the actual arguments. If | |
1987 | REG_PARM_STACK_SPACE is defined, this includes the space for the register | |
1988 | parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, | |
1989 | stack space for register parameters is not pushed by the caller, but | |
1990 | is rather part of the fixed stack areas and hence not included in | |
1991 | `current_function_outgoing_args_size'. Nevertheless, we must allow | |
1992 | for it when allocating stack dynamic objects. */ | |
1993 | ||
1994 | #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) | |
1995 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
1996 | (current_function_outgoing_args_size \ | |
1997 | + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET)) | |
1998 | ||
1999 | #else | |
2000 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
2001 | (current_function_outgoing_args_size + (STACK_POINTER_OFFSET)) | |
2002 | #endif | |
2003 | ||
2004 | #else | |
2005 | #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET | |
2006 | #endif | |
2007 | #endif | |
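/* For example, on a hypothetical target with ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE of 24, no OUTGOING_REG_PARM_STACK_SPACE, a zero
   STACK_POINTER_OFFSET, and 16 bytes of outgoing arguments, dynamic
   allocations would start 40 bytes from the stack pointer.  */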
2008 | ||
2009 | /* Pass through the INSNS of function FNDECL and convert virtual register | |
2010 | references to hard register references. */ | |
2011 | ||
2012 | void | |
2013 | instantiate_virtual_regs (fndecl, insns) | |
2014 | tree fndecl; | |
2015 | rtx insns; | |
2016 | { | |
2017 | rtx insn; | |
2018 | ||
2019 | /* Compute the offsets to use for this function. */ | |
2020 | in_arg_offset = FIRST_PARM_OFFSET (fndecl); | |
2021 | var_offset = STARTING_FRAME_OFFSET; | |
2022 | dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl); | |
2023 | out_arg_offset = STACK_POINTER_OFFSET; | |
2024 | ||
2025 | /* Scan all variables and parameters of this function. For each that is | |
2026 | in memory, instantiate all virtual registers if the result is a valid | |
2027 | address. If not, we do it later. That will handle most uses of virtual | |
2028 | regs on many machines. */ | |
2029 | instantiate_decls (fndecl, 1); | |
2030 | ||
2031 | /* Initialize recognition, indicating that volatile is OK. */ | |
2032 | init_recog (); | |
2033 | ||
2034 | /* Scan through all the insns, instantiating every virtual register still | |
2035 | present. */ | |
2036 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
2037 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN | |
2038 | || GET_CODE (insn) == CALL_INSN) | |
2039 | { | |
2040 | instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1); | |
2041 | instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0); | |
2042 | } | |
2043 | ||
2044 | /* Now instantiate the remaining register equivalences for debugging info. | |
2045 | These will not be valid addresses. */ | |
2046 | instantiate_decls (fndecl, 0); | |
2047 | ||
2048 | /* Indicate that, from now on, assign_stack_local should use | |
2049 | frame_pointer_rtx. */ | |
2050 | virtuals_instantiated = 1; | |
2051 | } | |
2052 | ||
2053 | /* Scan all decls in FNDECL (both variables and parameters) and instantiate | |
2054 | all virtual registers in their DECL_RTL's. | |
2055 | ||
2056 | If VALID_ONLY, do this only if the resulting address is still valid. | |
2057 | Otherwise, always do it. */ | |
2058 | ||
2059 | static void | |
2060 | instantiate_decls (fndecl, valid_only) | |
2061 | tree fndecl; | |
2062 | int valid_only; | |
2063 | { | |
2064 | tree decl; | |
2065 | ||
2066 | if (DECL_INLINE (fndecl)) | |
2067 | /* When compiling an inline function, the obstack used for | |
2068 | rtl allocation is the maybepermanent_obstack. Calling | |
2069 | `resume_temporary_allocation' switches us back to that | |
2070 | obstack while we process this function's parameters. */ | |
2071 | resume_temporary_allocation (); | |
2072 | ||
2073 | /* Process all parameters of the function. */ | |
2074 | for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) | |
2075 | { | |
2076 | instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)), | |
2077 | valid_only); | |
2078 | instantiate_decl (DECL_INCOMING_RTL (decl), | |
2079 | int_size_in_bytes (TREE_TYPE (decl)), valid_only); | |
2080 | } | |
2081 | ||
2082 | /* Now process all variables defined in the function or its subblocks. */ | |
2083 | instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only); | |
2084 | ||
2085 | if (DECL_INLINE (fndecl)) | |
2086 | { | |
2087 | /* Save all rtl allocated for this function by raising the | |
2088 | high-water mark on the maybepermanent_obstack. */ | |
2089 | preserve_data (); | |
2090 | /* All further rtl allocation is now done in the current_obstack. */ | |
2091 | rtl_in_current_obstack (); | |
2092 | } | |
2093 | } | |
2094 | ||
2095 | /* Subroutine of instantiate_decls: Process all decls in the given | |
2096 | BLOCK node and all its subblocks. */ | |
2097 | ||
2098 | static void | |
2099 | instantiate_decls_1 (let, valid_only) | |
2100 | tree let; | |
2101 | int valid_only; | |
2102 | { | |
2103 | tree t; | |
2104 | ||
2105 | for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) | |
2106 | instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)), | |
2107 | valid_only); | |
2108 | ||
2109 | /* Process all subblocks. */ | |
2110 | for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) | |
2111 | instantiate_decls_1 (t, valid_only); | |
2112 | } | |
2113 | ||
2114 | /* Subroutine of the preceding procedures: Given RTL representing a | |
2115 | decl and the size of the object, do any instantiation required. | |
2116 | ||
2117 | If VALID_ONLY is non-zero, it means that the RTL should only be | |
2118 | changed if the new address is valid. */ | |
2119 | ||
2120 | static void | |
2121 | instantiate_decl (x, size, valid_only) | |
2122 | rtx x; | |
2123 | int size; | |
2124 | int valid_only; | |
2125 | { | |
2126 | enum machine_mode mode; | |
2127 | rtx addr; | |
2128 | ||
2129 | /* If this is not a MEM, no need to do anything. Similarly if the | |
2130 | address is a constant or a register that is not a virtual register. */ | |
2131 | ||
2132 | if (x == 0 || GET_CODE (x) != MEM) | |
2133 | return; | |
2134 | ||
2135 | addr = XEXP (x, 0); | |
2136 | if (CONSTANT_P (addr) | |
2137 | || (GET_CODE (addr) == REG | |
2138 | && (REGNO (addr) < FIRST_VIRTUAL_REGISTER | |
2139 | || REGNO (addr) > LAST_VIRTUAL_REGISTER))) | |
2140 | return; | |
2141 | ||
2142 | /* If we should only do this if the address is valid, copy the address. | |
2143 | We need to do this so we can undo any changes that might make the | |
2144 | address invalid. This copy is unfortunate, but probably can't be | |
2145 | avoided. */ | |
2146 | ||
2147 | if (valid_only) | |
2148 | addr = copy_rtx (addr); | |
2149 | ||
2150 | instantiate_virtual_regs_1 (&addr, NULL_RTX, 0); | |
2151 | ||
2152 | if (! valid_only) | |
2153 | return; | |
2154 | ||
2155 | /* Now verify that the resulting address is valid for every integer or | |
2156 | floating-point mode up to and including SIZE bytes long. We do this | |
2157 | since the object might be accessed in any mode and frame addresses | |
2158 | are shared. */ | |
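/* For instance, on a typical 32-bit target an 8-byte object is checked
   with QImode, HImode, SImode and DImode as well as SFmode and DFmode.  */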
2159 | ||
2160 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
2161 | mode != VOIDmode && GET_MODE_SIZE (mode) <= size; | |
2162 | mode = GET_MODE_WIDER_MODE (mode)) | |
2163 | if (! memory_address_p (mode, addr)) | |
2164 | return; | |
2165 | ||
2166 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); | |
2167 | mode != VOIDmode && GET_MODE_SIZE (mode) <= size; | |
2168 | mode = GET_MODE_WIDER_MODE (mode)) | |
2169 | if (! memory_address_p (mode, addr)) | |
2170 | return; | |
2171 | ||
2172 | /* Otherwise, put back the address, now that we have updated it and we | |
2173 | know it is valid. */ | |
2174 | ||
2175 | XEXP (x, 0) = addr; | |
2176 | } | |
2177 | \f | |
2178 | /* Given a pointer to a piece of rtx and an optional pointer to the | |
2179 | containing object, instantiate any virtual registers present in it. | |
2180 | ||
2181 | If EXTRA_INSNS, we always do the replacement and generate | |
2182 | any extra insns before OBJECT. If it is zero, we do nothing if replacement | |
2183 | is not valid. | |
2184 | ||
2185 | Return 1 if we either had nothing to do or if we were able to do the | |
2186 | needed replacement. Return 0 otherwise; we only return zero if | |
2187 | EXTRA_INSNS is zero. | |
2188 | ||
2189 | We first try some simple transformations to avoid the creation of extra | |
2190 | pseudos. */ | |
2191 | ||
2192 | static int | |
2193 | instantiate_virtual_regs_1 (loc, object, extra_insns) | |
2194 | rtx *loc; | |
2195 | rtx object; | |
2196 | int extra_insns; | |
2197 | { | |
2198 | rtx x; | |
2199 | RTX_CODE code; | |
2200 | rtx new = 0; | |
2201 | int offset; | |
2202 | rtx temp; | |
2203 | rtx seq; | |
2204 | int i, j; | |
2205 | char *fmt; | |
2206 | ||
2207 | /* Re-start here to avoid recursion in common cases. */ | |
2208 | restart: | |
2209 | ||
2210 | x = *loc; | |
2211 | if (x == 0) | |
2212 | return 1; | |
2213 | ||
2214 | code = GET_CODE (x); | |
2215 | ||
2216 | /* Check for some special cases. */ | |
2217 | switch (code) | |
2218 | { | |
2219 | case CONST_INT: | |
2220 | case CONST_DOUBLE: | |
2221 | case CONST: | |
2222 | case SYMBOL_REF: | |
2223 | case CODE_LABEL: | |
2224 | case PC: | |
2225 | case CC0: | |
2226 | case ASM_INPUT: | |
2227 | case ADDR_VEC: | |
2228 | case ADDR_DIFF_VEC: | |
2229 | case RETURN: | |
2230 | return 1; | |
2231 | ||
2232 | case SET: | |
2233 | /* We are allowed to set the virtual registers. This means that | |
2234 | the actual register should receive the source minus the | |
2235 | appropriate offset. This is used, for example, in the handling | |
2236 | of non-local gotos. */ | |
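/* For example, (set (reg virtual-stack-vars) (reg 100)) is rewritten so
   that the frame pointer is set to (reg 100) minus var_offset, with any
   insns needed to form that value emitted before this insn.  */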
2237 | if (SET_DEST (x) == virtual_incoming_args_rtx) | |
2238 | new = arg_pointer_rtx, offset = - in_arg_offset; | |
2239 | else if (SET_DEST (x) == virtual_stack_vars_rtx) | |
2240 | new = frame_pointer_rtx, offset = - var_offset; | |
2241 | else if (SET_DEST (x) == virtual_stack_dynamic_rtx) | |
2242 | new = stack_pointer_rtx, offset = - dynamic_offset; | |
2243 | else if (SET_DEST (x) == virtual_outgoing_args_rtx) | |
2244 | new = stack_pointer_rtx, offset = - out_arg_offset; | |
2245 | ||
2246 | if (new) | |
2247 | { | |
2248 | /* The only valid sources here are PLUS or REG. Just do | |
2249 | the simplest possible thing to handle them. */ | |
2250 | if (GET_CODE (SET_SRC (x)) != REG | |
2251 | && GET_CODE (SET_SRC (x)) != PLUS) | |
2252 | abort (); | |
2253 | ||
2254 | start_sequence (); | |
2255 | if (GET_CODE (SET_SRC (x)) != REG) | |
2256 | temp = force_operand (SET_SRC (x), NULL_RTX); | |
2257 | else | |
2258 | temp = SET_SRC (x); | |
2259 | temp = force_operand (plus_constant (temp, offset), NULL_RTX); | |
2260 | seq = get_insns (); | |
2261 | end_sequence (); | |
2262 | ||
2263 | emit_insns_before (seq, object); | |
2264 | SET_DEST (x) = new; | |
2265 | ||
2266 | if (!validate_change (object, &SET_SRC (x), temp, 0) | |
2267 | || ! extra_insns) | |
2268 | abort (); | |
2269 | ||
2270 | return 1; | |
2271 | } | |
2272 | ||
2273 | instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns); | |
2274 | loc = &SET_SRC (x); | |
2275 | goto restart; | |
2276 | ||
2277 | case PLUS: | |
2278 | /* Handle special case of virtual register plus constant. */ | |
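/* For example, (plus:SI (reg virtual-stack-vars) (const_int 8)) is
   normally rewritten as (plus:SI (reg frame-pointer) (const_int N)),
   where N is 8 plus var_offset; if N turns out to be zero, the sum
   collapses to the frame pointer itself.  */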
2279 | if (CONSTANT_P (XEXP (x, 1))) | |
2280 | { | |
2281 | rtx old; | |
2282 | ||
2283 | /* Check for (plus (plus VIRT foo) (const_int)) first. */ | |
2284 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
2285 | { | |
2286 | rtx inner = XEXP (XEXP (x, 0), 0); | |
2287 | ||
2288 | if (inner == virtual_incoming_args_rtx) | |
2289 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2290 | else if (inner == virtual_stack_vars_rtx) | |
2291 | new = frame_pointer_rtx, offset = var_offset; | |
2292 | else if (inner == virtual_stack_dynamic_rtx) | |
2293 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2294 | else if (inner == virtual_outgoing_args_rtx) | |
2295 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2296 | else | |
2297 | { | |
2298 | loc = &XEXP (x, 0); | |
2299 | goto restart; | |
2300 | } | |
2301 | ||
2302 | instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object, | |
2303 | extra_insns); | |
2304 | new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1)); | |
2305 | } | |
2306 | ||
2307 | else if (XEXP (x, 0) == virtual_incoming_args_rtx) | |
2308 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2309 | else if (XEXP (x, 0) == virtual_stack_vars_rtx) | |
2310 | new = frame_pointer_rtx, offset = var_offset; | |
2311 | else if (XEXP (x, 0) == virtual_stack_dynamic_rtx) | |
2312 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2313 | else if (XEXP (x, 0) == virtual_outgoing_args_rtx) | |
2314 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2315 | else | |
2316 | { | |
2317 | /* We know the second operand is a constant. Unless the | |
2318 | first operand is a REG (which has been already checked), | |
2319 | it needs to be checked. */ | |
2320 | if (GET_CODE (XEXP (x, 0)) != REG) | |
2321 | { | |
2322 | loc = &XEXP (x, 0); | |
2323 | goto restart; | |
2324 | } | |
2325 | return 1; | |
2326 | } | |
2327 | ||
2328 | old = XEXP (x, 0); | |
2329 | XEXP (x, 0) = new; | |
2330 | new = plus_constant (XEXP (x, 1), offset); | |
2331 | ||
2332 | /* If the new constant is zero, try to replace the sum with its | |
2333 | first operand. */ | |
2334 | if (new == const0_rtx | |
2335 | && validate_change (object, loc, XEXP (x, 0), 0)) | |
2336 | return 1; | |
2337 | ||
2338 | /* Next try to replace constant with new one. */ | |
2339 | if (!validate_change (object, &XEXP (x, 1), new, 0)) | |
2340 | { | |
2341 | if (! extra_insns) | |
2342 | { | |
2343 | XEXP (x, 0) = old; | |
2344 | return 0; | |
2345 | } | |
2346 | ||
2347 | /* Otherwise copy the new constant into a register and replace | |
2348 | constant with that register. */ | |
2349 | temp = gen_reg_rtx (Pmode); | |
2350 | if (validate_change (object, &XEXP (x, 1), temp, 0)) | |
2351 | emit_insn_before (gen_move_insn (temp, new), object); | |
2352 | else | |
2353 | { | |
2354 | /* If that didn't work, replace this expression with a | |
2355 | register containing the sum. */ | |
2356 | ||
2357 | new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new); | |
2358 | XEXP (x, 0) = old; | |
2359 | ||
2360 | start_sequence (); | |
2361 | temp = force_operand (new, NULL_RTX); | |
2362 | seq = get_insns (); | |
2363 | end_sequence (); | |
2364 | ||
2365 | emit_insns_before (seq, object); | |
2366 | if (! validate_change (object, loc, temp, 0) | |
2367 | && ! validate_replace_rtx (x, temp, object)) | |
2368 | abort (); | |
2369 | } | |
2370 | } | |
2371 | ||
2372 | return 1; | |
2373 | } | |
2374 | ||
2375 | /* Fall through to generic two-operand expression case. */ | |
2376 | case EXPR_LIST: | |
2377 | case CALL: | |
2378 | case COMPARE: | |
2379 | case MINUS: | |
2380 | case MULT: | |
2381 | case DIV: case UDIV: | |
2382 | case MOD: case UMOD: | |
2383 | case AND: case IOR: case XOR: | |
2384 | case LSHIFT: case ASHIFT: case ROTATE: | |
2385 | case ASHIFTRT: case LSHIFTRT: case ROTATERT: | |
2386 | case NE: case EQ: | |
2387 | case GE: case GT: case GEU: case GTU: | |
2388 | case LE: case LT: case LEU: case LTU: | |
2389 | if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1))) | |
2390 | instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns); | |
2391 | loc = &XEXP (x, 0); | |
2392 | goto restart; | |
2393 | ||
2394 | case MEM: | |
2395 | /* Most cases of MEM that convert to valid addresses have already been | |
2396 | handled by our scan of regno_reg_rtx. The only special handling we | |
2397 | need here is to make a copy of the rtx to ensure it isn't being | |
2398 | shared if we have to change it to a pseudo. | |
2399 | ||
2400 | If the rtx is a simple reference to an address via a virtual register, | |
2401 | it can potentially be shared. In such cases, first try to make it | |
2402 | a valid address, which can also be shared. Otherwise, copy it and | |
2403 | proceed normally. | |
2404 | ||
2405 | First check for common cases that need no processing. These are | |
2406 | usually due to instantiation already being done on a previous instance | |
2407 | of a shared rtx. */ | |
2408 | ||
2409 | temp = XEXP (x, 0); | |
2410 | if (CONSTANT_ADDRESS_P (temp) | |
2411 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
2412 | || temp == arg_pointer_rtx | |
2413 | #endif | |
2414 | || temp == frame_pointer_rtx) | |
2415 | return 1; | |
2416 | ||
2417 | if (GET_CODE (temp) == PLUS | |
2418 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
2419 | && (XEXP (temp, 0) == frame_pointer_rtx | |
2420 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
2421 | || XEXP (temp, 0) == arg_pointer_rtx | |
2422 | #endif | |
2423 | )) | |
2424 | return 1; | |
2425 | ||
2426 | if (temp == virtual_stack_vars_rtx | |
2427 | || temp == virtual_incoming_args_rtx | |
2428 | || (GET_CODE (temp) == PLUS | |
2429 | && CONSTANT_ADDRESS_P (XEXP (temp, 1)) | |
2430 | && (XEXP (temp, 0) == virtual_stack_vars_rtx | |
2431 | || XEXP (temp, 0) == virtual_incoming_args_rtx))) | |
2432 | { | |
2433 | /* This MEM may be shared. If the substitution can be done without | |
2434 | the need to generate new pseudos, we want to do it in place | |
2435 | so all copies of the shared rtx benefit. The call below will | |
2436 | only make substitutions if the resulting address is still | |
2437 | valid. | |
2438 | ||
2439 | Note that we cannot pass X as the object in the recursive call | |
2440 | since the insn being processed may not allow all valid | |
2441 | addresses. However, if we were not passed an object, we can | |
2442 | only modify X without copying it if X will have a valid | |
2443 | address. | |
2444 | ||
2445 | ??? Also note that this can still lose if OBJECT is an insn that | |
2446 | has fewer restrictions on an address than some other insn. | |
2447 | In that case, we will modify the shared address. This case | |
2448 | doesn't seem very likely, though. */ | |
2449 | ||
2450 | if (instantiate_virtual_regs_1 (&XEXP (x, 0), | |
2451 | object ? object : x, 0)) | |
2452 | return 1; | |
2453 | ||
2454 | /* Otherwise make a copy and process that copy. We copy the entire | |
2455 | RTL expression since it might be a PLUS which could also be | |
2456 | shared. */ | |
2457 | *loc = x = copy_rtx (x); | |
2458 | } | |
2459 | ||
2460 | /* Fall through to generic unary operation case. */ | |
2461 | case USE: | |
2462 | case CLOBBER: | |
2463 | case SUBREG: | |
2464 | case STRICT_LOW_PART: | |
2465 | case NEG: case NOT: | |
2466 | case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC: | |
2467 | case SIGN_EXTEND: case ZERO_EXTEND: | |
2468 | case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: | |
2469 | case FLOAT: case FIX: | |
2470 | case UNSIGNED_FIX: case UNSIGNED_FLOAT: | |
2471 | case ABS: | |
2472 | case SQRT: | |
2473 | case FFS: | |
2474 | /* These cases either have just one operand or we know that we need not | |
2475 | check the rest of the operands. */ | |
2476 | loc = &XEXP (x, 0); | |
2477 | goto restart; | |
2478 | ||
2479 | case REG: | |
2480 | /* Try to replace with a PLUS. If that doesn't work, compute the sum | |
2481 | in front of this insn and substitute the temporary. */ | |
2482 | if (x == virtual_incoming_args_rtx) | |
2483 | new = arg_pointer_rtx, offset = in_arg_offset; | |
2484 | else if (x == virtual_stack_vars_rtx) | |
2485 | new = frame_pointer_rtx, offset = var_offset; | |
2486 | else if (x == virtual_stack_dynamic_rtx) | |
2487 | new = stack_pointer_rtx, offset = dynamic_offset; | |
2488 | else if (x == virtual_outgoing_args_rtx) | |
2489 | new = stack_pointer_rtx, offset = out_arg_offset; | |
2490 | ||
2491 | if (new) | |
2492 | { | |
2493 | temp = plus_constant (new, offset); | |
2494 | if (!validate_change (object, loc, temp, 0)) | |
2495 | { | |
2496 | if (! extra_insns) | |
2497 | return 0; | |
2498 | ||
2499 | start_sequence (); | |
2500 | temp = force_operand (temp, NULL_RTX); | |
2501 | seq = get_insns (); | |
2502 | end_sequence (); | |
2503 | ||
2504 | emit_insns_before (seq, object); | |
2505 | if (! validate_change (object, loc, temp, 0) | |
2506 | && ! validate_replace_rtx (x, temp, object)) | |
2507 | abort (); | |
2508 | } | |
2509 | } | |
2510 | ||
2511 | return 1; | |
2512 | } | |
2513 | ||
2514 | /* Scan all subexpressions. */ | |
2515 | fmt = GET_RTX_FORMAT (code); | |
2516 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
2517 | if (*fmt == 'e') | |
2518 | { | |
2519 | if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns)) | |
2520 | return 0; | |
2521 | } | |
2522 | else if (*fmt == 'E') | |
2523 | for (j = 0; j < XVECLEN (x, i); j++) | |
2524 | if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object, | |
2525 | extra_insns)) | |
2526 | return 0; | |
2527 | ||
2528 | return 1; | |
2529 | } | |
2530 | \f | |
2531 | /* Optimization: assuming this function does not receive nonlocal gotos, | |
2532 | delete the handlers for such, as well as the insns to establish | |
2533 | and disestablish them. */ | |
2534 | ||
2535 | static void | |
2536 | delete_handlers () | |
2537 | { | |
2538 | rtx insn; | |
2539 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2540 | { | |
2541 | /* Delete the handler by turning off the flag that would | |
2542 | prevent jump_optimize from deleting it. | |
2543 | Also permit deletion of the nonlocal labels themselves | |
2544 | if nothing local refers to them. */ | |
2545 | if (GET_CODE (insn) == CODE_LABEL) | |
2546 | LABEL_PRESERVE_P (insn) = 0; | |
2547 | if (GET_CODE (insn) == INSN | |
2548 | && ((nonlocal_goto_handler_slot != 0 | |
2549 | && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn))) | |
2550 | || (nonlocal_goto_stack_level != 0 | |
2551 | && reg_mentioned_p (nonlocal_goto_stack_level, | |
2552 | PATTERN (insn))))) | |
2553 | delete_insn (insn); | |
2554 | } | |
2555 | } | |
2556 | ||
2557 | /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels | |
2558 | of the current function. */ | |
2559 | ||
2560 | rtx | |
2561 | nonlocal_label_rtx_list () | |
2562 | { | |
2563 | tree t; | |
2564 | rtx x = 0; | |
2565 | ||
2566 | for (t = nonlocal_labels; t; t = TREE_CHAIN (t)) | |
2567 | x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x); | |
2568 | ||
2569 | return x; | |
2570 | } | |
2571 | \f | |
2572 | /* Output a USE for any register use in RTL. | |
2573 | This is used with -noreg to mark the extent of lifespan | |
2574 | of any registers used in a user-visible variable's DECL_RTL. */ | |
2575 | ||
2576 | void | |
2577 | use_variable (rtl) | |
2578 | rtx rtl; | |
2579 | { | |
2580 | if (GET_CODE (rtl) == REG) | |
2581 | /* This is a register variable. */ | |
2582 | emit_insn (gen_rtx (USE, VOIDmode, rtl)); | |
2583 | else if (GET_CODE (rtl) == MEM | |
2584 | && GET_CODE (XEXP (rtl, 0)) == REG | |
2585 | && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER | |
2586 | || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER) | |
2587 | && XEXP (rtl, 0) != current_function_internal_arg_pointer) | |
2588 | /* This is a variable-sized structure. */ | |
2589 | emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0))); | |
2590 | } | |
2591 | ||
2592 | /* Like use_variable except that it outputs the USEs after INSN | |
2593 | instead of at the end of the insn-chain. */ | |
2594 | ||
2595 | void | |
2596 | use_variable_after (rtl, insn) | |
2597 | rtx rtl, insn; | |
2598 | { | |
2599 | if (GET_CODE (rtl) == REG) | |
2600 | /* This is a register variable. */ | |
2601 | emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn); | |
2602 | else if (GET_CODE (rtl) == MEM | |
2603 | && GET_CODE (XEXP (rtl, 0)) == REG | |
2604 | && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER | |
2605 | || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER) | |
2606 | && XEXP (rtl, 0) != current_function_internal_arg_pointer) | |
2607 | /* This is a variable-sized structure. */ | |
2608 | emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn); | |
2609 | } | |
2610 | \f | |
2611 | int | |
2612 | max_parm_reg_num () | |
2613 | { | |
2614 | return max_parm_reg; | |
2615 | } | |
2616 | ||
2617 | /* Return the first insn following those generated by `assign_parms'. */ | |
2618 | ||
2619 | rtx | |
2620 | get_first_nonparm_insn () | |
2621 | { | |
2622 | if (last_parm_insn) | |
2623 | return NEXT_INSN (last_parm_insn); | |
2624 | return get_insns (); | |
2625 | } | |
2626 | ||
2627 | /* Return the first NOTE_INSN_BLOCK_BEG note in the function. | |
2628 | Crash if there is none. */ | |
2629 | ||
2630 | rtx | |
2631 | get_first_block_beg () | |
2632 | { | |
2633 | register rtx searcher; | |
2634 | register rtx insn = get_first_nonparm_insn (); | |
2635 | ||
2636 | for (searcher = insn; searcher; searcher = NEXT_INSN (searcher)) | |
2637 | if (GET_CODE (searcher) == NOTE | |
2638 | && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG) | |
2639 | return searcher; | |
2640 | ||
2641 | abort (); /* Invalid call to this function. (See comments above.) */ | |
2642 | return NULL_RTX; | |
2643 | } | |
2644 | ||
2645 | /* Return 1 if EXP returns an aggregate value, for which an address | |
2646 | must be passed to the function or returned by the function. */ | |
2647 | ||
2648 | int | |
2649 | aggregate_value_p (exp) | |
2650 | tree exp; | |
2651 | { | |
2652 | int i, regno, nregs; | |
2653 | rtx reg; | |
2654 | if (RETURN_IN_MEMORY (TREE_TYPE (exp))) | |
2655 | return 1; | |
2656 | if (flag_pcc_struct_return | |
2657 | && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE | |
2658 | || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE | |
2659 | || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE)) | |
2660 | return 1; | |
2661 | /* Make sure we have suitable call-clobbered regs to return | |
2662 | the value in; if not, we must return it in memory. */ | |
2663 | reg = hard_function_value (TREE_TYPE (exp), 0); | |
2664 | regno = REGNO (reg); | |
2665 | nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (TREE_TYPE (exp))); | |
2666 | for (i = 0; i < nregs; i++) | |
2667 | if (! call_used_regs[regno + i]) | |
2668 | return 1; | |
2669 | return 0; | |
2670 | } | |
2671 | \f | |
2672 | /* Assign RTL expressions to the function's parameters. | |
2673 | This may involve copying them into registers and using | |
2674 | those registers as the RTL for them. | |
2675 | ||
2676 | If SECOND_TIME is non-zero it means that this function is being | |
2677 | called a second time. This is done by integrate.c when a function's | |
2678 | compilation is deferred. We need to come back here in case the | |
2679 | FUNCTION_ARG macro computes items needed for the rest of the compilation | |
2680 | (such as changing which registers are fixed or caller-saved). But suppress | |
2681 | writing any insns or setting DECL_RTL of anything in this case. */ | |
2682 | ||
2683 | void | |
2684 | assign_parms (fndecl, second_time) | |
2685 | tree fndecl; | |
2686 | int second_time; | |
2687 | { | |
2688 | register tree parm; | |
2689 | register rtx entry_parm = 0; | |
2690 | register rtx stack_parm = 0; | |
2691 | CUMULATIVE_ARGS args_so_far; | |
2692 | enum machine_mode promoted_mode, passed_mode, nominal_mode; | |
2693 | int unsignedp; | |
2694 | /* Total space needed so far for args on the stack, | |
2695 | given as a constant and a tree-expression. */ | |
2696 | struct args_size stack_args_size; | |
2697 | tree fntype = TREE_TYPE (fndecl); | |
2698 | tree fnargs = DECL_ARGUMENTS (fndecl); | |
2699 | /* This is used for the arg pointer when referring to stack args. */ | |
2700 | rtx internal_arg_pointer; | |
2701 | /* This is a dummy PARM_DECL that we used for the function result if | |
2702 | the function returns a structure. */ | |
2703 | tree function_result_decl = 0; | |
2704 | int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1; | |
2705 | int varargs_setup = 0; | |
2706 | rtx conversion_insns = 0; | |
2707 | ||
2708 | /* Nonzero if the last arg is named `__builtin_va_alist', | |
2709 | which is used on some machines for old-fashioned non-ANSI varargs.h; | |
2710 | this should be stuck onto the stack as if it had arrived there. */ | |
2711 | int vararg | |
2712 | = (fnargs | |
2713 | && (parm = tree_last (fnargs)) != 0 | |
2714 | && DECL_NAME (parm) | |
2715 | && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)), | |
2716 | "__builtin_va_alist"))); | |
2717 | ||
2718 | /* Nonzero if function takes extra anonymous args. | |
2719 | This means the last named arg must be on the stack | |
2720 | right before the anonymous ones. */ | |
2721 | int stdarg | |
2722 | = (TYPE_ARG_TYPES (fntype) != 0 | |
2723 | && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
2724 | != void_type_node)); | |
2725 | ||
2726 | /* If the reg that the virtual arg pointer will be translated into is | |
2727 | not a fixed reg or is the stack pointer, make a copy of the virtual | |
2728 | arg pointer, and address parms via the copy. The frame pointer is | |
2729 | considered fixed even though it is not marked as such. | |
2730 | ||
2731 | The second time through, simply use ap to avoid generating rtx. */ | |
2732 | ||
2733 | if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM | |
2734 | || ! (fixed_regs[ARG_POINTER_REGNUM] | |
2735 | || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)) | |
2736 | && ! second_time) | |
2737 | internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx); | |
2738 | else | |
2739 | internal_arg_pointer = virtual_incoming_args_rtx; | |
2740 | current_function_internal_arg_pointer = internal_arg_pointer; | |
2741 | ||
2742 | stack_args_size.constant = 0; | |
2743 | stack_args_size.var = 0; | |
2744 | ||
2745 | /* If struct value address is treated as the first argument, make it so. */ | |
2746 | if (aggregate_value_p (DECL_RESULT (fndecl)) | |
2747 | && ! current_function_returns_pcc_struct | |
2748 | && struct_value_incoming_rtx == 0) | |
2749 | { | |
2750 | tree type = build_pointer_type (fntype); | |
2751 | ||
2752 | function_result_decl = build_decl (PARM_DECL, NULL_TREE, type); | |
2753 | ||
2754 | DECL_ARG_TYPE (function_result_decl) = type; | |
2755 | TREE_CHAIN (function_result_decl) = fnargs; | |
2756 | fnargs = function_result_decl; | |
2757 | } | |
2758 | ||
2759 | parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
2760 | bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx)); | |
2761 | ||
2762 | #ifdef INIT_CUMULATIVE_INCOMING_ARGS | |
2763 | INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX); | |
2764 | #else | |
2765 | INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX); | |
2766 | #endif | |
2767 | ||
2768 | /* We haven't yet found an argument that we must push and pretend the | |
2769 | caller did. */ | |
2770 | current_function_pretend_args_size = 0; | |
2771 | ||
2772 | for (parm = fnargs; parm; parm = TREE_CHAIN (parm)) | |
2773 | { | |
2774 | int aggregate | |
2775 | = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE | |
2776 | || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE | |
2777 | || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE | |
2778 | || TREE_CODE (TREE_TYPE (parm)) == QUAL_UNION_TYPE); | |
2779 | struct args_size stack_offset; | |
2780 | struct args_size arg_size; | |
2781 | int passed_pointer = 0; | |
2782 | tree passed_type = DECL_ARG_TYPE (parm); | |
2783 | ||
2784 | /* Set LAST_NAMED if this is last named arg before some | |
2785 | anonymous args. We treat it as if it were anonymous too. */ | |
2786 | int last_named = ((TREE_CHAIN (parm) == 0 | |
2787 | || DECL_NAME (TREE_CHAIN (parm)) == 0) | |
2788 | && (vararg || stdarg)); | |
2789 | ||
2790 | if (TREE_TYPE (parm) == error_mark_node | |
2791 | /* This can happen after weird syntax errors | |
2792 | or if an enum type is defined among the parms. */ | |
2793 | || TREE_CODE (parm) != PARM_DECL | |
2794 | || passed_type == NULL) | |
2795 | { | |
2796 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode, | |
2797 | const0_rtx); | |
2798 | TREE_USED (parm) = 1; | |
2799 | continue; | |
2800 | } | |
2801 | ||
2802 | /* For varargs.h function, save info about regs and stack space | |
2803 | used by the individual args, not including the va_alist arg. */ | |
2804 | if (vararg && last_named) | |
2805 | current_function_args_info = args_so_far; | |
2806 | ||
2807 | /* Find mode of arg as it is passed, and mode of arg | |
2808 | as it should be during execution of this function. */ | |
2809 | passed_mode = TYPE_MODE (passed_type); | |
2810 | nominal_mode = TYPE_MODE (TREE_TYPE (parm)); | |
2811 | ||
2812 | /* If the parm's mode is VOID, its value doesn't matter, | |
2813 | so avoid the usual things like emit_move_insn that could crash. */ | |
2814 | if (nominal_mode == VOIDmode) | |
2815 | { | |
2816 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx; | |
2817 | continue; | |
2818 | } | |
2819 | ||
2820 | #ifdef FUNCTION_ARG_PASS_BY_REFERENCE | |
2821 | /* See if this arg was passed by invisible reference. */ | |
2822 | if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode, | |
2823 | passed_type, ! last_named)) | |
2824 | { | |
2825 | passed_type = build_pointer_type (passed_type); | |
2826 | passed_pointer = 1; | |
2827 | passed_mode = nominal_mode = Pmode; | |
2828 | } | |
2829 | #endif | |
2830 | ||
2831 | promoted_mode = passed_mode; | |
2832 | ||
2833 | #ifdef PROMOTE_FUNCTION_ARGS | |
2834 | /* Compute the mode to which the arg is actually extended. */ | |
2835 | if (TREE_CODE (passed_type) == INTEGER_TYPE | |
2836 | || TREE_CODE (passed_type) == ENUMERAL_TYPE | |
2837 | || TREE_CODE (passed_type) == BOOLEAN_TYPE | |
2838 | || TREE_CODE (passed_type) == CHAR_TYPE | |
2839 | || TREE_CODE (passed_type) == REAL_TYPE | |
2840 | || TREE_CODE (passed_type) == POINTER_TYPE | |
2841 | || TREE_CODE (passed_type) == OFFSET_TYPE) | |
2842 | { | |
2843 | unsignedp = TREE_UNSIGNED (passed_type); | |
2844 | PROMOTE_MODE (promoted_mode, unsignedp, passed_type); | |
2845 | } | |
2846 | #endif | |
2847 | ||
2848 | /* Let machine desc say which reg (if any) the parm arrives in. | |
2849 | 0 means it arrives on the stack. */ | |
2850 | #ifdef FUNCTION_INCOMING_ARG | |
2851 | entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode, | |
2852 | passed_type, ! last_named); | |
2853 | #else | |
2854 | entry_parm = FUNCTION_ARG (args_so_far, promoted_mode, | |
2855 | passed_type, ! last_named); | |
2856 | #endif | |
2857 | ||
2858 | if (entry_parm) | |
2859 | passed_mode = promoted_mode; | |
2860 | ||
2861 | #ifdef SETUP_INCOMING_VARARGS | |
2862 | /* If this is the last named parameter, do any required setup for | |
2863 | varargs or stdargs. We need to know about the case of this being an | |
2864 | addressable type, in which case we skip the registers it | |
2865 | would have arrived in. | |
2866 | ||
2867 | For stdargs, LAST_NAMED will be set for two parameters, the one that | |
2868 | is actually the last named, and the dummy parameter. We only | |
2869 | want to do this action once. | |
2870 | ||
2871 | Also, indicate when RTL generation is to be suppressed. */ | |
2872 | if (last_named && !varargs_setup) | |
2873 | { | |
2874 | SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type, | |
2875 | current_function_pretend_args_size, | |
2876 | second_time); | |
2877 | varargs_setup = 1; | |
2878 | } | |
2879 | #endif | |
2880 | ||
2881 | /* Determine parm's home in the stack, | |
2882 | in case it arrives in the stack or we should pretend it did. | |
2883 | ||
2884 | Compute the stack position and rtx where the argument arrives | |
2885 | and its size. | |
2886 | ||
2887 | There is one complexity here: If this was a parameter that would | |
2888 | have been passed in registers, but wasn't only because it is | |
2889 | __builtin_va_alist, we want locate_and_pad_parm to treat it as if | |
2890 | it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | |
2891 | In this case, we call FUNCTION_ARG with NAMED set to 1 instead of | |
2892 | 0 as it was the previous time. */ | |
2893 | ||
2894 | locate_and_pad_parm (passed_mode, passed_type, | |
2895 | #ifdef STACK_PARMS_IN_REG_PARM_AREA | |
2896 | 1, | |
2897 | #else | |
2898 | #ifdef FUNCTION_INCOMING_ARG | |
2899 | FUNCTION_INCOMING_ARG (args_so_far, passed_mode, | |
2900 | passed_type, | |
2901 | (! last_named | |
2902 | || varargs_setup)) != 0, | |
2903 | #else | |
2904 | FUNCTION_ARG (args_so_far, passed_mode, | |
2905 | passed_type, | |
2906 | ! last_named || varargs_setup) != 0, | |
2907 | #endif | |
2908 | #endif | |
2909 | fndecl, &stack_args_size, &stack_offset, &arg_size); | |
2910 | ||
2911 | if (! second_time) | |
2912 | { | |
2913 | rtx offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
2914 | ||
2915 | if (offset_rtx == const0_rtx) | |
2916 | stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer); | |
2917 | else | |
2918 | stack_parm = gen_rtx (MEM, passed_mode, | |
2919 | gen_rtx (PLUS, Pmode, | |
2920 | internal_arg_pointer, offset_rtx)); | |
2921 | ||
2922 | /* If this is a memory ref that contains aggregate components, | |
2923 | mark it as such for cse and loop optimize. */ | |
2924 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
2925 | } | |
2926 | ||
2927 | /* If this parameter was passed both in registers and in the stack, | |
2928 | use the copy on the stack. */ | |
2929 | if (MUST_PASS_IN_STACK (passed_mode, passed_type)) | |
2930 | entry_parm = 0; | |
2931 | ||
2932 | #ifdef FUNCTION_ARG_PARTIAL_NREGS | |
2933 | /* If this parm was passed part in regs and part in memory, | |
2934 | pretend it arrived entirely in memory | |
2935 | by pushing the register-part onto the stack. | |
2936 | ||
2937 | In the special case of a DImode or DFmode that is split, | |
2938 | we could put it together in a pseudoreg directly, | |
2939 | but for now that's not worth bothering with. */ | |
2940 | ||
2941 | if (entry_parm) | |
2942 | { | |
2943 | int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode, | |
2944 | passed_type, ! last_named); | |
2945 | ||
2946 | if (nregs > 0) | |
2947 | { | |
2948 | current_function_pretend_args_size | |
2949 | = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1) | |
2950 | / (PARM_BOUNDARY / BITS_PER_UNIT) | |
2951 | * (PARM_BOUNDARY / BITS_PER_UNIT)); | |
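| /* For example, assuming UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, | |
| three registers occupy 12 bytes, which rounds up to 16 bytes of | |
| pretended arg space. */ | |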
2952 | ||
2953 | if (! second_time) | |
2954 | move_block_from_reg (REGNO (entry_parm), | |
2955 | validize_mem (stack_parm), nregs); | |
2956 | entry_parm = stack_parm; | |
2957 | } | |
2958 | } | |
2959 | #endif | |
2960 | ||
2961 | /* If we didn't decide this parm came in a register, | |
2962 | by default it came on the stack. */ | |
2963 | if (entry_parm == 0) | |
2964 | entry_parm = stack_parm; | |
2965 | ||
2966 | /* Record permanently how this parm was passed. */ | |
2967 | if (! second_time) | |
2968 | DECL_INCOMING_RTL (parm) = entry_parm; | |
2969 | ||
2970 | /* If there is actually space on the stack for this parm, | |
2971 | count it in stack_args_size; otherwise set stack_parm to 0 | |
2972 | to indicate there is no preallocated stack slot for the parm. */ | |
2973 | ||
2974 | if (entry_parm == stack_parm | |
2975 | #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE) | |
2976 | /* On some machines, even if a parm value arrives in a register | |
2977 | there is still an (uninitialized) stack slot allocated for it. | |
2978 | ||
2979 | ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell | |
2980 | whether this parameter already has a stack slot allocated, | |
2981 | because an arg block exists only if current_function_args_size | |
2982 | is larger than some threshold, and we haven't calculated that | |
2983 | yet. So, for now, we just assume that stack slots never exist | |
2984 | in this case. */ | |
2985 | || REG_PARM_STACK_SPACE (fndecl) > 0 | |
2986 | #endif | |
2987 | ) | |
2988 | { | |
2989 | stack_args_size.constant += arg_size.constant; | |
2990 | if (arg_size.var) | |
2991 | ADD_PARM_SIZE (stack_args_size, arg_size.var); | |
2992 | } | |
2993 | else | |
2994 | /* No stack slot was pushed for this parm. */ | |
2995 | stack_parm = 0; | |
2996 | ||
2997 | /* Update info on where next arg arrives in registers. */ | |
2998 | ||
2999 | FUNCTION_ARG_ADVANCE (args_so_far, passed_mode, | |
3000 | passed_type, ! last_named); | |
3001 | ||
3002 | /* If this is our second time through, we are done with this parm. */ | |
3003 | if (second_time) | |
3004 | continue; | |
3005 | ||
3006 | /* If we can't trust the parm stack slot to be aligned enough | |
3007 | for its ultimate type, don't use that slot after entry. | |
3008 | We'll make another stack slot, if we need one. */ | |
3009 | { | |
3010 | int thisparm_boundary | |
3011 | = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type); | |
3012 | ||
3013 | if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary) | |
3014 | stack_parm = 0; | |
3015 | } | |
3016 | ||
3017 | /* Now adjust STACK_PARM to the mode and precise location | |
3018 | where this parameter should live during execution, | |
3019 | if we discover that it must live in the stack during execution. | |
3020 | To make debuggers happier on big-endian machines, we store | |
3021 | the value in the last bytes of the space available. */ | |
3022 | ||
3023 | if (nominal_mode != BLKmode && nominal_mode != passed_mode | |
3024 | && stack_parm != 0) | |
3025 | { | |
3026 | rtx offset_rtx; | |
3027 | ||
3028 | #if BYTES_BIG_ENDIAN | |
3029 | if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD) | |
3030 | stack_offset.constant += (GET_MODE_SIZE (passed_mode) | |
3031 | - GET_MODE_SIZE (nominal_mode)); | |
3032 | #endif | |
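| /* For example, on a big-endian target where a QImode parm was | |
| passed as a 4-byte SImode word, the offset advances by 3 so the | |
| byte of interest is the last byte of the slot. */ | |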
3033 | ||
3034 | offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
3035 | if (offset_rtx == const0_rtx) | |
3036 | stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer); | |
3037 | else | |
3038 | stack_parm = gen_rtx (MEM, nominal_mode, | |
3039 | gen_rtx (PLUS, Pmode, | |
3040 | internal_arg_pointer, offset_rtx)); | |
3041 | ||
3042 | /* If this is a memory ref that contains aggregate components, | |
3043 | mark it as such for cse and loop optimize. */ | |
3044 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3045 | } | |
3046 | ||
3047 | /* ENTRY_PARM is an RTX for the parameter as it arrives, | |
3048 | in the mode in which it arrives. | |
3049 | STACK_PARM is an RTX for a stack slot where the parameter can live | |
3050 | during the function (in case we want to put it there). | |
3051 | STACK_PARM is 0 if no stack slot was pushed for it. | |
3052 | ||
3053 | Now output code if necessary to convert ENTRY_PARM to | |
3054 | the type in which this function declares it, | |
3055 | and store that result in an appropriate place, | |
3056 | which may be a pseudo reg, may be STACK_PARM, | |
3057 | or may be a local stack slot if STACK_PARM is 0. | |
3058 | ||
3059 | Set DECL_RTL to that place. */ | |
3060 | ||
3061 | if (nominal_mode == BLKmode) | |
3062 | { | |
3063 | /* If a BLKmode arrives in registers, copy it to a stack slot. */ | |
3064 | if (GET_CODE (entry_parm) == REG) | |
3065 | { | |
3066 | int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)), | |
3067 | UNITS_PER_WORD); | |
3068 | ||
3069 | /* Note that we will be storing an integral number of words. | |
3070 | So we have to be careful to ensure that we allocate an | |
3071 | integral number of words. We do this below in the | |
3072 | assign_stack_local if space was not allocated in the argument | |
3073 | list. If it was, this will not work if PARM_BOUNDARY is not | |
3074 | a multiple of BITS_PER_WORD. It isn't clear how to fix this | |
3075 | if it becomes a problem. */ | |
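| /* E.g., assuming UNITS_PER_WORD == 4, an 11-byte BLKmode parm | |
| gets SIZE_STORED == 12, i.e. three full words. */ | |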
3076 | ||
3077 | if (stack_parm == 0) | |
3078 | { | |
3079 | stack_parm | |
3080 | = assign_stack_local (GET_MODE (entry_parm), size_stored, 0); | |
3081 | /* If this is a memory ref that contains aggregate components, | |
3082 | mark it as such for cse and loop optimize. */ | |
3083 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3084 | } | |
3085 | ||
3086 | else if (PARM_BOUNDARY % BITS_PER_WORD != 0) | |
3087 | abort (); | |
3088 | ||
3089 | move_block_from_reg (REGNO (entry_parm), | |
3090 | validize_mem (stack_parm), | |
3091 | size_stored / UNITS_PER_WORD); | |
3092 | } | |
3093 | DECL_RTL (parm) = stack_parm; | |
3094 | } | |
3095 | else if (! ((obey_regdecls && ! DECL_REGISTER (parm) | |
3096 | && ! DECL_INLINE (fndecl)) | |
3097 | /* layout_decl may set this. */ | |
3098 | || TREE_ADDRESSABLE (parm) | |
3099 | || TREE_SIDE_EFFECTS (parm) | |
3100 | /* If -ffloat-store specified, don't put explicit | |
3101 | float variables into registers. */ | |
3102 | || (flag_float_store | |
3103 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)) | |
3104 | /* Always assign pseudo to structure return or item passed | |
3105 | by invisible reference. */ | |
3106 | || passed_pointer || parm == function_result_decl) | |
3107 | { | |
3108 | /* Store the parm in a pseudoregister during the function, but we | |
3109 | may need to do it in a wider mode. */ | |
3110 | ||
3111 | register rtx parmreg; | |
3112 | ||
3113 | unsignedp = TREE_UNSIGNED (TREE_TYPE (parm)); | |
3114 | if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE | |
3115 | || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE | |
3116 | || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE | |
3117 | || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE | |
3118 | || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE | |
3119 | || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE | |
3120 | || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE) | |
3121 | { | |
3122 | PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm)); | |
3123 | } | |
3124 | ||
3125 | parmreg = gen_reg_rtx (nominal_mode); | |
3126 | REG_USERVAR_P (parmreg) = 1; | |
3127 | ||
3128 | /* If this was an item that we received a pointer to, set DECL_RTL | |
3129 | appropriately. */ | |
3130 | if (passed_pointer) | |
3131 | { | |
3132 | DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg); | |
3133 | MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate; | |
3134 | } | |
3135 | else | |
3136 | DECL_RTL (parm) = parmreg; | |
3137 | ||
3138 | /* Copy the value into the register. */ | |
3139 | if (GET_MODE (parmreg) != GET_MODE (entry_parm)) | |
3140 | { | |
3141 | /* If ENTRY_PARM is a hard register, it might be in a register | |
3142 | not valid for operating in its mode (e.g., an odd-numbered | |
3143 | register for a DFmode). In that case, moves are the only | |
3144 | thing valid, so we can't do a convert from there. This | |
3145 | occurs when the calling sequence allows such misaligned | |
3146 | usages. | |
3147 | ||
3148 | In addition, the conversion may involve a call, which could | |
3149 | clobber parameters which haven't been copied to pseudo | |
3150 | registers yet. Therefore, we must first copy the parm to | |
3151 | a pseudo reg here, and save the conversion until after all | |
3152 | parameters have been moved. */ | |
3153 | ||
3154 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
3155 | ||
3156 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
3157 | ||
3158 | push_to_sequence (conversion_insns); | |
3159 | convert_move (parmreg, tempreg, unsignedp); | |
3160 | conversion_insns = get_insns (); | |
3161 | end_sequence (); | |
3162 | } | |
3163 | else | |
3164 | emit_move_insn (parmreg, validize_mem (entry_parm)); | |
3165 | ||
3166 | /* If we were passed a pointer but the actual value | |
3167 | can safely live in a register, put it in one. */ | |
3168 | if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode | |
3169 | && ! ((obey_regdecls && ! DECL_REGISTER (parm) | |
3170 | && ! DECL_INLINE (fndecl)) | |
3171 | /* layout_decl may set this. */ | |
3172 | || TREE_ADDRESSABLE (parm) | |
3173 | || TREE_SIDE_EFFECTS (parm) | |
3174 | /* If -ffloat-store specified, don't put explicit | |
3175 | float variables into registers. */ | |
3176 | || (flag_float_store | |
3177 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))) | |
3178 | { | |
3179 | /* We can't use nominal_mode, because it will have been set to | |
3180 | Pmode above. We must use the actual mode of the parm. */ | |
3181 | parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); | |
3182 | emit_move_insn (parmreg, DECL_RTL (parm)); | |
3183 | DECL_RTL (parm) = parmreg; | |
3184 | } | |
3185 | #ifdef FUNCTION_ARG_CALLEE_COPIES | |
3186 | /* If we are passed an arg by reference and it is our responsibility | |
3187 | to make a copy, do it now. | |
3188 | PASSED_TYPE and PASSED_MODE now refer to the pointer, not the | |
3189 | original argument, so we must recreate them in the call to | |
3190 | FUNCTION_ARG_CALLEE_COPIES. */ | |
3191 | /* ??? Later add code to handle the case where the argument isn't | |
3192 | modified, in which case the copy can be omitted. */ | |
3193 | ||
3194 | else if (passed_pointer | |
3195 | && FUNCTION_ARG_CALLEE_COPIES (args_so_far, | |
3196 | TYPE_MODE (DECL_ARG_TYPE (parm)), | |
3197 | DECL_ARG_TYPE (parm), | |
3198 | ! last_named)) | |
3199 | { | |
3200 | rtx copy; | |
3201 | tree type = DECL_ARG_TYPE (parm); | |
3202 | ||
3203 | /* This sequence may involve a library call perhaps clobbering | |
3204 | registers that haven't been copied to pseudos yet. */ | |
3205 | ||
3206 | push_to_sequence (conversion_insns); | |
3207 | ||
3208 | if (TYPE_SIZE (type) == 0 | |
3209 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) | |
3210 | { | |
3211 | /* This is a variable sized object. */ | |
3212 | /* ??? Can we use expr_size here? */ | |
3213 | rtx size_rtx = expand_expr (size_in_bytes (type), NULL_RTX, | |
3214 | TYPE_MODE (sizetype), 0); | |
3215 | ||
3216 | copy = gen_rtx (MEM, BLKmode, | |
3217 | allocate_dynamic_stack_space (size_rtx, NULL_RTX, | |
3218 | TYPE_ALIGN (type))); | |
3219 | } | |
3220 | else | |
3221 | { | |
3222 | int size = int_size_in_bytes (type); | |
3223 | copy = assign_stack_temp (TYPE_MODE (type), size, 1); | |
3224 | } | |
3225 | ||
3226 | store_expr (parm, copy, 0); | |
3227 | emit_move_insn (parmreg, XEXP (copy, 0)); | |
3228 | conversion_insns = get_insns (); | |
3229 | end_sequence (); | |
3230 | } | |
3231 | #endif /* FUNCTION_ARG_CALLEE_COPIES */ | |
3232 | ||
3233 | /* In any case, record the parm's desired stack location | |
3234 | in case we later discover it must live in the stack. */ | |
3235 | if (REGNO (parmreg) >= nparmregs) | |
3236 | { | |
3237 | rtx *new; | |
| int old_nparmregs = nparmregs; | |
3238 | nparmregs = REGNO (parmreg) + 5; | |
3239 | new = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
| bzero ((char *) new, nparmregs * sizeof (rtx)); | |
| /* Copy only the entries that existed before; the old array | |
| had only OLD_NPARMREGS elements. */ | |
3240 | bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx)); | |
3241 | parm_reg_stack_loc = new; | |
3242 | } | |
3243 | parm_reg_stack_loc[REGNO (parmreg)] = stack_parm; | |
3244 | ||
3245 | /* Mark the register as eliminable if we did no conversion | |
3246 | and it was copied from memory at a fixed offset, | |
3247 | and the arg pointer was not copied to a pseudo-reg. | |
3248 | If the arg pointer is a pseudo reg or the offset formed | |
3249 | an invalid address, such memory-equivalences | |
3250 | as we make here would screw up life analysis for it. */ | |
3251 | if (nominal_mode == passed_mode | |
3252 | && GET_CODE (entry_parm) == MEM | |
3253 | && entry_parm == stack_parm | |
3254 | && stack_offset.var == 0 | |
3255 | && reg_mentioned_p (virtual_incoming_args_rtx, | |
3256 | XEXP (entry_parm, 0))) | |
3257 | REG_NOTES (get_last_insn ()) | |
3258 | = gen_rtx (EXPR_LIST, REG_EQUIV, | |
3259 | entry_parm, REG_NOTES (get_last_insn ())); | |
3260 | ||
3261 | /* For pointer data type, suggest pointer register. */ | |
3262 | if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE) | |
3263 | mark_reg_pointer (parmreg); | |
3264 | } | |
3265 | else | |
3266 | { | |
3267 | /* Value must be stored in the stack slot STACK_PARM | |
3268 | during function execution. */ | |
3269 | ||
3270 | if (passed_mode != nominal_mode) | |
3271 | { | |
3272 | /* Conversion is required. */ | |
3273 | rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
3274 | ||
3275 | emit_move_insn (tempreg, validize_mem (entry_parm)); | |
3276 | ||
3277 | push_to_sequence (conversion_insns); | |
3278 | entry_parm = convert_to_mode (nominal_mode, tempreg, | |
3279 | TREE_UNSIGNED (TREE_TYPE (parm))); | |
3280 | conversion_insns = get_insns (); | |
3281 | end_sequence (); | |
3282 | } | |
3283 | ||
3284 | if (entry_parm != stack_parm) | |
3285 | { | |
3286 | if (stack_parm == 0) | |
3287 | { | |
3288 | stack_parm | |
3289 | = assign_stack_local (GET_MODE (entry_parm), | |
3290 | GET_MODE_SIZE (GET_MODE (entry_parm)), 0); | |
3291 | /* If this is a memory ref that contains aggregate components, | |
3292 | mark it as such for cse and loop optimize. */ | |
3293 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
3294 | } | |
3295 | ||
3296 | if (passed_mode != nominal_mode) | |
3297 | { | |
3298 | push_to_sequence (conversion_insns); | |
3299 | emit_move_insn (validize_mem (stack_parm), | |
3300 | validize_mem (entry_parm)); | |
3301 | conversion_insns = get_insns (); | |
3302 | end_sequence (); | |
3303 | } | |
3304 | else | |
3305 | emit_move_insn (validize_mem (stack_parm), | |
3306 | validize_mem (entry_parm)); | |
3307 | } | |
3308 | ||
3309 | DECL_RTL (parm) = stack_parm; | |
3310 | } | |
3311 | ||
3312 | /* If this "parameter" was the place where we are receiving the | |
3313 | function's incoming structure pointer, set up the result. */ | |
3314 | if (parm == function_result_decl) | |
3315 | DECL_RTL (DECL_RESULT (fndecl)) | |
3316 | = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm)); | |
3317 | ||
3318 | if (TREE_THIS_VOLATILE (parm)) | |
3319 | MEM_VOLATILE_P (DECL_RTL (parm)) = 1; | |
3320 | if (TREE_READONLY (parm)) | |
3321 | RTX_UNCHANGING_P (DECL_RTL (parm)) = 1; | |
3322 | } | |
3323 | ||
3324 | /* Output all parameter conversion instructions (possibly including calls) | |
3325 | now that all parameters have been copied out of hard registers. */ | |
3326 | emit_insns (conversion_insns); | |
3327 | ||
3328 | max_parm_reg = max_reg_num (); | |
3329 | last_parm_insn = get_last_insn (); | |
3330 | ||
3331 | current_function_args_size = stack_args_size.constant; | |
3332 | ||
3333 | /* Adjust function incoming argument size for alignment and | |
3334 | minimum length. */ | |
3335 | ||
3336 | #ifdef REG_PARM_STACK_SPACE | |
3337 | #ifndef MAYBE_REG_PARM_STACK_SPACE | |
3338 | current_function_args_size = MAX (current_function_args_size, | |
3339 | REG_PARM_STACK_SPACE (fndecl)); | |
3340 | #endif | |
3341 | #endif | |
3342 | ||
3343 | #ifdef STACK_BOUNDARY | |
3344 | #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT) | |
3345 | ||
3346 | current_function_args_size | |
3347 | = ((current_function_args_size + STACK_BYTES - 1) | |
3348 | / STACK_BYTES) * STACK_BYTES; | |
3349 | #endif | |
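| /* E.g., assuming STACK_BOUNDARY == 64 (STACK_BYTES == 8), an | |
| argument block of 20 bytes is rounded up to 24. */ | |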
3350 | ||
3351 | #ifdef ARGS_GROW_DOWNWARD | |
3352 | current_function_arg_offset_rtx | |
3353 | = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant) | |
3354 | : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var, | |
3355 | size_int (-stack_args_size.constant)), | |
3356 | NULL_RTX, VOIDmode, 0)); | |
3357 | #else | |
3358 | current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size); | |
3359 | #endif | |
3360 | ||
3361 | /* See how many bytes, if any, of its args a function should try to pop | |
3362 | on return. */ | |
3363 | ||
3364 | current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl), | |
3365 | current_function_args_size); | |
3366 | ||
3367 | /* For a stdarg.h function, save info about regs and stack space | |
3368 | used by the named args. */ | |
3369 | ||
3370 | if (stdarg) | |
3371 | current_function_args_info = args_so_far; | |
3372 | ||
3373 | /* Set the rtx used for the function return value. Put this in its | |
3374 | own variable so any optimizers that need this information don't have | |
3375 | to include tree.h. Do this here so it gets done when an inlined | |
3376 | function gets output. */ | |
3377 | ||
3378 | current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl)); | |
3379 | } | |
3380 | \f | |
3381 | /* Indicate whether REGNO is an incoming argument to the current function | |
3382 | that was promoted to a wider mode. If so, return the RTX for the | |
3383 | register (to get its mode). PMODE and PUNSIGNEDP are set to the mode | |
3384 | that REGNO is promoted from and whether the promotion was signed or | |
3385 | unsigned. */ | |
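| /* For example, on a target whose PROMOTE_MODE widens HImode args to | |
| SImode, asking about the register holding such an arg returns the | |
| SImode incoming reg, with *PMODE set to HImode. */ | |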
3386 | ||
3387 | #ifdef PROMOTE_FUNCTION_ARGS | |
3388 | ||
3389 | rtx | |
3390 | promoted_input_arg (regno, pmode, punsignedp) | |
3391 | int regno; | |
3392 | enum machine_mode *pmode; | |
3393 | int *punsignedp; | |
3394 | { | |
3395 | tree arg; | |
3396 | ||
3397 | for (arg = DECL_ARGUMENTS (current_function_decl); arg; | |
3398 | arg = TREE_CHAIN (arg)) | |
3399 | if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG | |
3400 | && REGNO (DECL_INCOMING_RTL (arg)) == regno | |
3401 | && (TREE_CODE (TREE_TYPE (arg)) == INTEGER_TYPE | |
3402 | || TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE | |
3403 | || TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE | |
3404 | || TREE_CODE (TREE_TYPE (arg)) == CHAR_TYPE | |
3405 | || TREE_CODE (TREE_TYPE (arg)) == REAL_TYPE | |
3406 | || TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE | |
3407 | || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE)) | |
3408 | { | |
3409 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg)); | |
3410 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg)); | |
3411 | ||
3412 | PROMOTE_MODE (mode, unsignedp, TREE_TYPE (arg)); | |
3413 | if (mode == GET_MODE (DECL_INCOMING_RTL (arg)) | |
3414 | && mode != DECL_MODE (arg)) | |
3415 | { | |
3416 | *pmode = DECL_MODE (arg); | |
3417 | *punsignedp = unsignedp; | |
3418 | return DECL_INCOMING_RTL (arg); | |
3419 | } | |
3420 | } | |
3421 | ||
3422 | return 0; | |
3423 | } | |
3424 | ||
3425 | #endif | |
3426 | \f | |
3427 | /* Compute the size and offset from the start of the stacked arguments for a | |
3428 | parm passed in mode PASSED_MODE and with type TYPE. | |
3429 | ||
3430 | INITIAL_OFFSET_PTR points to the current offset into the stacked | |
3431 | arguments. | |
3432 | ||
3433 | The starting offset and size for this parm are returned in *OFFSET_PTR | |
3434 | and *ARG_SIZE_PTR, respectively. | |
3435 | ||
3436 | IN_REGS is non-zero if the argument will be passed in registers. It will | |
3437 | never be set if REG_PARM_STACK_SPACE is not defined. | |
3438 | ||
3439 | FNDECL is the function in which the argument was defined. | |
3440 | ||
3441 | There are two types of rounding that are done. The first, controlled by | |
3442 | FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument | |
3443 | list to be aligned to the specific boundary (in bits). This rounding | |
3444 | affects the initial and starting offsets, but not the argument size. | |
3445 | ||
3446 | The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY, | |
3447 | optionally rounds the size of the parm to PARM_BOUNDARY. The | |
3448 | initial offset is not affected by this rounding, while the size always | |
3449 | is and the starting offset may be. */ | |
3450 | ||
3451 | /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case; | |
3452 | initial_offset_ptr is positive because locate_and_pad_parm's | |
3453 | callers pass in the total size of args so far as | |
3454 | initial_offset_ptr. arg_size_ptr is always positive. */ | |
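| /* A minimal example, assuming PARM_BOUNDARY == 32, a 32-bit | |
| FUNCTION_ARG_BOUNDARY and upward padding: a 6-byte arg at an | |
| initial offset of 4 keeps offset 4 (already aligned) and is given | |
| a size of 8, the next multiple of the parm boundary. */ | |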
3455 | ||
3456 | static void pad_to_arg_alignment (), pad_below (); | |
3457 | ||
3458 | void | |
3459 | locate_and_pad_parm (passed_mode, type, in_regs, fndecl, | |
3460 | initial_offset_ptr, offset_ptr, arg_size_ptr) | |
3461 | enum machine_mode passed_mode; | |
3462 | tree type; | |
3463 | int in_regs; | |
3464 | tree fndecl; | |
3465 | struct args_size *initial_offset_ptr; | |
3466 | struct args_size *offset_ptr; | |
3467 | struct args_size *arg_size_ptr; | |
3468 | { | |
3469 | tree sizetree | |
3470 | = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); | |
3471 | enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type); | |
3472 | int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type); | |
3473 | int boundary_in_bytes = boundary / BITS_PER_UNIT; | |
3474 | int reg_parm_stack_space = 0; | |
3475 | ||
3476 | #ifdef REG_PARM_STACK_SPACE | |
3477 | /* If we have found a stack parm before we reach the end of the | |
3478 | area reserved for registers, skip that area. */ | |
3479 | if (! in_regs) | |
3480 | { | |
3481 | #ifdef MAYBE_REG_PARM_STACK_SPACE | |
3482 | reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; | |
3483 | #else | |
3484 | reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); | |
3485 | #endif | |
3486 | if (reg_parm_stack_space > 0) | |
3487 | { | |
3488 | if (initial_offset_ptr->var) | |
3489 | { | |
3490 | initial_offset_ptr->var | |
3491 | = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | |
3492 | size_int (reg_parm_stack_space)); | |
3493 | initial_offset_ptr->constant = 0; | |
3494 | } | |
3495 | else if (initial_offset_ptr->constant < reg_parm_stack_space) | |
3496 | initial_offset_ptr->constant = reg_parm_stack_space; | |
3497 | } | |
3498 | } | |
3499 | #endif /* REG_PARM_STACK_SPACE */ | |
3500 | ||
3501 | arg_size_ptr->var = 0; | |
3502 | arg_size_ptr->constant = 0; | |
3503 | ||
3504 | #ifdef ARGS_GROW_DOWNWARD | |
3505 | if (initial_offset_ptr->var) | |
3506 | { | |
3507 | offset_ptr->constant = 0; | |
3508 | offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node, | |
3509 | initial_offset_ptr->var); | |
3510 | } | |
3511 | else | |
3512 | { | |
3513 | offset_ptr->constant = - initial_offset_ptr->constant; | |
3514 | offset_ptr->var = 0; | |
3515 | } | |
3516 | if (where_pad == upward | |
3517 | && (TREE_CODE (sizetree) != INTEGER_CST | |
3518 | || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY))) | |
3519 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3520 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
3521 | if (where_pad != downward) | |
3522 | pad_to_arg_alignment (offset_ptr, boundary); | |
3523 | if (initial_offset_ptr->var) | |
3524 | { | |
3525 | arg_size_ptr->var = size_binop (MINUS_EXPR, | |
3526 | size_binop (MINUS_EXPR, | |
3527 | integer_zero_node, | |
3528 | initial_offset_ptr->var), | |
3529 | offset_ptr->var); | |
3530 | } | |
3531 | else | |
3532 | { | |
3533 | arg_size_ptr->constant = (- initial_offset_ptr->constant - | |
3534 | offset_ptr->constant); | |
3535 | } | |
3536 | /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */ | |
3537 | if (where_pad == downward) | |
3538 | pad_below (arg_size_ptr, passed_mode, sizetree); | |
3539 | #else /* !ARGS_GROW_DOWNWARD */ | |
3540 | pad_to_arg_alignment (initial_offset_ptr, boundary); | |
3541 | *offset_ptr = *initial_offset_ptr; | |
3542 | if (where_pad == downward) | |
3543 | pad_below (offset_ptr, passed_mode, sizetree); | |
3544 | ||
3545 | #ifdef PUSH_ROUNDING | |
3546 | if (passed_mode != BLKmode) | |
3547 | sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); | |
3548 | #endif | |
3549 | ||
3550 | if (where_pad != none | |
3551 | && (TREE_CODE (sizetree) != INTEGER_CST | |
3552 | || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY))) | |
3553 | sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3554 | ||
3555 | ADD_PARM_SIZE (*arg_size_ptr, sizetree); | |
3556 | #endif /* ARGS_GROW_DOWNWARD */ | |
3557 | } | |
3558 | ||
3559 | /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY | |
3560 | (down, when ARGS_GROW_DOWNWARD). BOUNDARY is measured in bits, | |
| but must be a multiple of a storage unit. */ | |
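| /* E.g., a constant offset of 10 with BOUNDARY == 32 bits (4 bytes) | |
| becomes 12, or 8 when ARGS_GROW_DOWNWARD. */ | |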
3561 | ||
3562 | static void | |
3563 | pad_to_arg_alignment (offset_ptr, boundary) | |
3564 | struct args_size *offset_ptr; | |
3565 | int boundary; | |
3566 | { | |
3567 | int boundary_in_bytes = boundary / BITS_PER_UNIT; | |
3568 | ||
3569 | if (boundary > BITS_PER_UNIT) | |
3570 | { | |
3571 | if (offset_ptr->var) | |
3572 | { | |
3573 | offset_ptr->var = | |
3574 | #ifdef ARGS_GROW_DOWNWARD | |
3575 | round_down | |
3576 | #else | |
3577 | round_up | |
3578 | #endif | |
3579 | (ARGS_SIZE_TREE (*offset_ptr), | |
3580 | boundary / BITS_PER_UNIT); | |
3581 | offset_ptr->constant = 0; /*?*/ | |
3582 | } | |
3583 | else | |
3584 | offset_ptr->constant = | |
3585 | #ifdef ARGS_GROW_DOWNWARD | |
3586 | FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes); | |
3587 | #else | |
3588 | CEIL_ROUND (offset_ptr->constant, boundary_in_bytes); | |
3589 | #endif | |
3590 | } | |
3591 | } | |
3592 | ||
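| /* Adjust *OFFSET_PTR by the padding that goes below an argument of | |
| mode PASSED_MODE (or of size SIZETREE, for BLKmode): the difference | |
| between the size rounded up to PARM_BOUNDARY and the actual size. | |
| E.g., assuming PARM_BOUNDARY == 32, a 2-byte HImode arg adds 2. */ | |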
3593 | static void | |
3594 | pad_below (offset_ptr, passed_mode, sizetree) | |
3595 | struct args_size *offset_ptr; | |
3596 | enum machine_mode passed_mode; | |
3597 | tree sizetree; | |
3598 | { | |
3599 | if (passed_mode != BLKmode) | |
3600 | { | |
3601 | if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY) | |
3602 | offset_ptr->constant | |
3603 | += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1) | |
3604 | / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT) | |
3605 | - GET_MODE_SIZE (passed_mode)); | |
3606 | } | |
3607 | else | |
3608 | { | |
3609 | if (TREE_CODE (sizetree) != INTEGER_CST | |
3610 | || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY) | |
3611 | { | |
3612 | /* Round the size up to a multiple of PARM_BOUNDARY bits. */ | |
3613 | tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
3614 | /* Add it in. */ | |
3615 | ADD_PARM_SIZE (*offset_ptr, s2); | |
3616 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
3617 | } | |
3618 | } | |
3619 | } | |
3620 | ||
3621 | static tree | |
3622 | round_down (value, divisor) | |
3623 | tree value; | |
3624 | int divisor; | |
3625 | { | |
3626 | return size_binop (MULT_EXPR, | |
3627 | size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)), | |
3628 | size_int (divisor)); | |
3629 | } | |
3630 | \f | |
3631 | /* Walk the tree of blocks describing the binding levels within a function | |
3632 | and warn about uninitialized variables. | |
3633 | This is done after calling flow_analysis and before global_alloc | |
3634 | clobbers the pseudo-regs to hard regs. */ | |
3635 | ||
3636 | void | |
3637 | uninitialized_vars_warning (block) | |
3638 | tree block; | |
3639 | { | |
3640 | register tree decl, sub; | |
3641 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
3642 | { | |
3643 | if (TREE_CODE (decl) == VAR_DECL | |
3644 | /* These warnings are unreliable for aggregates | |
3645 | because assigning the fields one by one can fail to convince | |
3646 | flow.c that the entire aggregate was initialized. | |
3647 | Unions are troublesome because members may be shorter. */ | |
3648 | && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE | |
3649 | && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE | |
3650 | && TREE_CODE (TREE_TYPE (decl)) != QUAL_UNION_TYPE | |
3651 | && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE | |
3652 | && DECL_RTL (decl) != 0 | |
3653 | && GET_CODE (DECL_RTL (decl)) == REG | |
3654 | && regno_uninitialized (REGNO (DECL_RTL (decl)))) | |
3655 | warning_with_decl (decl, | |
3656 | "`%s' may be used uninitialized in this function"); | |
3657 | if (TREE_CODE (decl) == VAR_DECL | |
3658 | && DECL_RTL (decl) != 0 | |
3659 | && GET_CODE (DECL_RTL (decl)) == REG | |
3660 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
3661 | warning_with_decl (decl, | |
3662 | "variable `%s' may be clobbered by `longjmp'"); | |
3663 | } | |
3664 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
3665 | uninitialized_vars_warning (sub); | |
3666 | } | |
3667 | ||
3668 | /* Do the appropriate part of uninitialized_vars_warning | |
3669 | but for arguments instead of local variables. */ | |
3670 | ||
3671 | void | |
3672 | setjmp_args_warning (block) | |
3673 | tree block; | |
3674 | { | |
3675 | register tree decl; | |
3676 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
3677 | decl; decl = TREE_CHAIN (decl)) | |
3678 | if (DECL_RTL (decl) != 0 | |
3679 | && GET_CODE (DECL_RTL (decl)) == REG | |
3680 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
3681 | warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp'"); | |
3682 | } | |
3683 | ||
3684 | /* If this function calls setjmp, put all vars into the stack | |
3685 | unless they were declared `register'. */ | |
3686 | ||
3687 | void | |
3688 | setjmp_protect (block) | |
3689 | tree block; | |
3690 | { | |
3691 | register tree decl, sub; | |
3692 | for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
3693 | if ((TREE_CODE (decl) == VAR_DECL | |
3694 | || TREE_CODE (decl) == PARM_DECL) | |
3695 | && DECL_RTL (decl) != 0 | |
3696 | && GET_CODE (DECL_RTL (decl)) == REG | |
3697 | /* If this variable came from an inline function, it must be | |
3698 | that its life doesn't overlap the setjmp. If there was a | |
3699 | setjmp in the function, it would already be in memory. We | |
3700 | must exclude such variables because their DECL_RTL might be | |
3701 | set to strange things such as virtual_stack_vars_rtx. */ | |
3702 | && ! DECL_FROM_INLINE (decl) | |
3703 | && ( | |
3704 | #ifdef NON_SAVING_SETJMP | |
3705 | /* If longjmp doesn't restore the registers, | |
3706 | don't put anything in them. */ | |
3707 | NON_SAVING_SETJMP | |
3708 | || | |
3709 | #endif | |
3710 | ! DECL_REGISTER (decl))) | |
3711 | put_var_into_stack (decl); | |
3712 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
3713 | setjmp_protect (sub); | |
3714 | } | |
3715 | \f | |
3716 | /* Like the previous function, but for args instead of local variables. */ | |
3717 | ||
3718 | void | |
3719 | setjmp_protect_args () | |
3720 | { | |
3721 | register tree decl, sub; | |
3722 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
3723 | decl; decl = TREE_CHAIN (decl)) | |
3724 | if ((TREE_CODE (decl) == VAR_DECL | |
3725 | || TREE_CODE (decl) == PARM_DECL) | |
3726 | && DECL_RTL (decl) != 0 | |
3727 | && GET_CODE (DECL_RTL (decl)) == REG | |
3728 | && ( | |
3729 | /* If longjmp doesn't restore the registers, | |
3730 | don't put anything in them. */ | |
3731 | #ifdef NON_SAVING_SETJMP | |
3732 | NON_SAVING_SETJMP | |
3733 | || | |
3734 | #endif | |
3735 | ! DECL_REGISTER (decl))) | |
3736 | put_var_into_stack (decl); | |
3737 | } | |
3738 | \f | |
3739 | /* Return the context-pointer register corresponding to DECL, | |
3740 | or 0 if it does not need one. */ | |
3741 | ||
3742 | rtx | |
3743 | lookup_static_chain (decl) | |
3744 | tree decl; | |
3745 | { | |
3746 | tree context = decl_function_context (decl); | |
3747 | tree link; | |
3748 | ||
3749 | if (context == 0) | |
3750 | return 0; | |
3751 | ||
3752 | /* We treat inline_function_decl as an alias for the current function | |
3753 | because that is the inline function whose vars, types, etc. | |
3754 | are being merged into the current function. | |
3755 | See expand_inline_function. */ | |
3756 | if (context == current_function_decl || context == inline_function_decl) | |
3757 | return virtual_stack_vars_rtx; | |
3758 | ||
3759 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
3760 | if (TREE_PURPOSE (link) == context) | |
3761 | return RTL_EXPR_RTL (TREE_VALUE (link)); | |
3762 | ||
3763 | abort (); | |
3764 | } | |
3765 | \f | |
3766 | /* Convert a stack slot address ADDR for variable VAR | |
3767 | (from a containing function) | |
3768 | into an address valid in this function (using a static chain). */ | |
3769 | ||
3770 | rtx | |
3771 | fix_lexical_addr (addr, var) | |
3772 | rtx addr; | |
3773 | tree var; | |
3774 | { | |
3775 | rtx basereg; | |
3776 | int displacement; | |
3777 | tree context = decl_function_context (var); | |
3778 | struct function *fp; | |
3779 | rtx base = 0; | |
3780 | ||
3781 | /* If this is the present function, we need not do anything. */ | |
3782 | if (context == current_function_decl || context == inline_function_decl) | |
3783 | return addr; | |
3784 | ||
3785 | for (fp = outer_function_chain; fp; fp = fp->next) | |
3786 | if (fp->decl == context) | |
3787 | break; | |
3788 | ||
3789 | if (fp == 0) | |
3790 | abort (); | |
3791 | ||
3792 | /* Decode given address as base reg plus displacement. */ | |
3793 | if (GET_CODE (addr) == REG) | |
3794 | basereg = addr, displacement = 0; | |
3795 | else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT) | |
3796 | basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1)); | |
3797 | else | |
3798 | abort (); | |
3799 | ||
3800 | /* We accept vars reached via the containing function's | |
3801 | incoming arg pointer and via its stack variables pointer. */ | |
3802 | if (basereg == fp->internal_arg_pointer) | |
3803 | { | |
3804 | /* If reached via arg pointer, get the arg pointer value | |
3805 | out of that function's stack frame. | |
3806 | ||
3807 | There are two cases: If a separate ap is needed, allocate a | |
3808 | slot in the outer function for it and dereference it that way. | |
3809 | This is correct even if the real ap is actually a pseudo. | |
3810 | Otherwise, just adjust the offset from the frame pointer to | |
3811 | compensate. */ | |
3812 | ||
3813 | #ifdef NEED_SEPARATE_AP | |
3814 | rtx addr; | |
3815 | ||
3816 | if (fp->arg_pointer_save_area == 0) | |
3817 | fp->arg_pointer_save_area | |
3818 | = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp); | |
3819 | ||
3820 | addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var); | |
3821 | addr = memory_address (Pmode, addr); | |
3822 | ||
3823 | base = copy_to_reg (gen_rtx (MEM, Pmode, addr)); | |
3824 | #else | |
3825 | displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET); | |
3826 | base = lookup_static_chain (var); | |
3827 | #endif | |
3828 | } | |
3829 | ||
3830 | else if (basereg == virtual_stack_vars_rtx) | |
3831 | { | |
3832 | /* This is the same code as lookup_static_chain, duplicated here to | |
3833 | avoid an extra call to decl_function_context. */ | |
3834 | tree link; | |
3835 | ||
3836 | for (link = context_display; link; link = TREE_CHAIN (link)) | |
3837 | if (TREE_PURPOSE (link) == context) | |
3838 | { | |
3839 | base = RTL_EXPR_RTL (TREE_VALUE (link)); | |
3840 | break; | |
3841 | } | |
3842 | } | |
3843 | ||
3844 | if (base == 0) | |
3845 | abort (); | |
3846 | ||
3847 | /* Use same offset, relative to appropriate static chain or argument | |
3848 | pointer. */ | |
3849 | return plus_constant (base, displacement); | |
3850 | } | |
3851 | \f | |
3852 | /* Return the address of the trampoline for entering nested fn FUNCTION. | |
3853 | If necessary, allocate a trampoline (in the stack frame) | |
3854 | and emit rtl to initialize its contents (at entry to this function). */ | |
3855 | ||
3856 | rtx | |
3857 | trampoline_address (function) | |
3858 | tree function; | |
3859 | { | |
3860 | tree link; | |
3861 | tree rtlexp; | |
3862 | rtx tramp; | |
3863 | struct function *fp; | |
3864 | tree fn_context; | |
3865 | ||
3866 | /* Find an existing trampoline and return it. */ | |
3867 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
3868 | if (TREE_PURPOSE (link) == function) | |
3869 | return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0); | |
3870 | for (fp = outer_function_chain; fp; fp = fp->next) | |
3871 | for (link = fp->trampoline_list; link; link = TREE_CHAIN (link)) | |
3872 | if (TREE_PURPOSE (link) == function) | |
3873 | { | |
3874 | tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0), | |
3875 | function); | |
3876 | return round_trampoline_addr (tramp); | |
3877 | } | |
3878 | ||
3879 | /* None exists; we must make one. */ | |
3880 | ||
3881 | /* Find the `struct function' for the function containing FUNCTION. */ | |
3882 | fp = 0; | |
3883 | fn_context = decl_function_context (function); | |
3884 | if (fn_context != current_function_decl) | |
3885 | for (fp = outer_function_chain; fp; fp = fp->next) | |
3886 | if (fp->decl == fn_context) | |
3887 | break; | |
3888 | ||
3889 | /* Allocate run-time space for this trampoline | |
3890 | (usually in the defining function's stack frame). */ | |
3891 | #ifdef ALLOCATE_TRAMPOLINE | |
3892 | tramp = ALLOCATE_TRAMPOLINE (fp); | |
3893 | #else | |
3894 | /* If rounding is needed, allocate extra space | |
3895 | to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */ | |
3896 | #ifdef TRAMPOLINE_ALIGNMENT | |
3897 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1) | |
3898 | #else | |
3899 | #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE) | |
3900 | #endif | |
3901 | if (fp != 0) | |
3902 | tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp); | |
3903 | else | |
3904 | tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0); | |
3905 | #endif | |
3906 | ||
3907 | /* Record the trampoline for reuse and note it for later initialization | |
3908 | by expand_function_end. */ | |
3909 | if (fp != 0) | |
3910 | { | |
3911 | push_obstacks (fp->current_obstack, fp->function_maybepermanent_obstack); | |
3912 | rtlexp = make_node (RTL_EXPR); | |
3913 | RTL_EXPR_RTL (rtlexp) = tramp; | |
3914 | fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list); | |
3915 | pop_obstacks (); | |
3916 | } | |
3917 | else | |
3918 | { | |
3919 | /* Make the RTL_EXPR node temporary, not momentary, so that the | |
3920 | trampoline_list doesn't become garbage. */ | |
3921 | int momentary = suspend_momentary (); | |
3922 | rtlexp = make_node (RTL_EXPR); | |
3923 | resume_momentary (momentary); | |
3924 | ||
3925 | RTL_EXPR_RTL (rtlexp) = tramp; | |
3926 | trampoline_list = tree_cons (function, rtlexp, trampoline_list); | |
3927 | } | |
3928 | ||
3929 | tramp = fix_lexical_addr (XEXP (tramp, 0), function); | |
3930 | return round_trampoline_addr (tramp); | |
3931 | } | |
3932 | ||
3933 | /* Given a trampoline address, | |
3934 | round it to multiple of TRAMPOLINE_ALIGNMENT. */ | |
3935 | ||
3936 | static rtx | |
3937 | round_trampoline_addr (tramp) | |
3938 | rtx tramp; | |
3939 | { | |
3940 | #ifdef TRAMPOLINE_ALIGNMENT | |
3941 | /* Round address up to desired boundary. */ | |
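| /* E.g., assuming TRAMPOLINE_ALIGNMENT == 8, an address of 0x1003 | |
| becomes (0x1003 + 7) & -8 == 0x1008. */ | |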
3942 | rtx temp = gen_reg_rtx (Pmode); | |
3943 | temp = expand_binop (Pmode, add_optab, tramp, | |
3944 | GEN_INT (TRAMPOLINE_ALIGNMENT - 1), | |
3945 | temp, 0, OPTAB_LIB_WIDEN); | |
3946 | tramp = expand_binop (Pmode, and_optab, temp, | |
3947 | GEN_INT (- TRAMPOLINE_ALIGNMENT), | |
3948 | temp, 0, OPTAB_LIB_WIDEN); | |
3949 | #endif | |
3950 | return tramp; | |
3951 | } | |
3952 | \f | |
3953 | /* The functions identify_blocks and reorder_blocks provide a way to | |
3954 | reorder the tree of BLOCK nodes, for optimizers that reshuffle or | |
3955 | duplicate portions of the RTL code. Call identify_blocks before | |
3956 | changing the RTL, and call reorder_blocks after. */ | |
3957 | ||
3958 | static int all_blocks (); | |
3959 | static tree blocks_nreverse (); | |
3960 | ||
3961 | /* Put all this function's BLOCK nodes into a vector, and return it. | |
3962 | Also store in each NOTE for the beginning or end of a block | |
3963 | the index of that block in the vector. | |
3964 | The arguments are TOP_BLOCK, the top-level block of the function, | |
3965 | and INSNS, the insn chain of the function. */ | |
3966 | ||
3967 | tree * | |
3968 | identify_blocks (top_block, insns) | |
3969 | tree top_block; | |
3970 | rtx insns; | |
3971 | { | |
3972 | int n_blocks; | |
3973 | tree *block_vector; | |
3974 | int *block_stack; | |
3975 | int depth = 0; | |
3976 | int next_block_number = 0; | |
3977 | int current_block_number = 0; | |
3978 | rtx insn; | |
3979 | ||
3980 | if (top_block == 0) | |
3981 | return 0; | |
3982 | ||
3983 | n_blocks = all_blocks (top_block, 0); | |
3984 | block_vector = (tree *) xmalloc (n_blocks * sizeof (tree)); | |
3985 | block_stack = (int *) alloca (n_blocks * sizeof (int)); | |
3986 | ||
3987 | all_blocks (top_block, block_vector); | |
3988 | ||
3989 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
3990 | if (GET_CODE (insn) == NOTE) | |
3991 | { | |
3992 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
3993 | { | |
3994 | block_stack[depth++] = current_block_number; | |
3995 | current_block_number = next_block_number; | |
3996 | NOTE_BLOCK_NUMBER (insn) = next_block_number++; | |
3997 | } | |
3998 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
3999 | { | |
4000 | current_block_number = block_stack[--depth]; | |
4001 | NOTE_BLOCK_NUMBER (insn) = current_block_number; | |
4002 | } | |
4003 | } | |
4004 | ||
4005 | return block_vector; | |
4006 | } | |
4007 | ||
4008 | /* Given BLOCK_VECTOR which was returned by identify_blocks, | |
4009 | and a revised instruction chain, rebuild the tree structure | |
4010 | of BLOCK nodes to correspond to the new order of RTL. | |
4011 | The new block tree is inserted below TOP_BLOCK. | |
4012 | Returns the current top-level block. */ | |
4013 | ||
4014 | tree | |
4015 | reorder_blocks (block_vector, top_block, insns) | |
4016 | tree *block_vector; | |
4017 | tree top_block; | |
4018 | rtx insns; | |
4019 | { | |
4020 | tree current_block = top_block; | |
4021 | rtx insn; | |
4022 | ||
4023 | if (block_vector == 0) | |
4024 | return top_block; | |
4025 | ||
4026 | /* Prune the old tree away, so that it doesn't get in the way. */ | |
4027 | BLOCK_SUBBLOCKS (current_block) = 0; | |
4028 | ||
4029 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
4030 | if (GET_CODE (insn) == NOTE) | |
4031 | { | |
4032 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) | |
4033 | { | |
4034 | tree block = block_vector[NOTE_BLOCK_NUMBER (insn)]; | |
4035 | /* If we have seen this block before, copy it. */ | |
4036 | if (TREE_ASM_WRITTEN (block)) | |
4037 | block = copy_node (block); | |
4038 | BLOCK_SUBBLOCKS (block) = 0; | |
4039 | TREE_ASM_WRITTEN (block) = 1; | |
4040 | BLOCK_SUPERCONTEXT (block) = current_block; | |
4041 | BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); | |
4042 | BLOCK_SUBBLOCKS (current_block) = block; | |
4043 | current_block = block; | |
4044 | NOTE_SOURCE_FILE (insn) = 0; | |
4045 | } | |
4046 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) | |
4047 | { | |
4048 | BLOCK_SUBBLOCKS (current_block) | |
4049 | = blocks_nreverse (BLOCK_SUBBLOCKS (current_block)); | |
4050 | current_block = BLOCK_SUPERCONTEXT (current_block); | |
4051 | NOTE_SOURCE_FILE (insn) = 0; | |
4052 | } | |
4053 | } | |
4054 | ||
4055 | return current_block; | |
4056 | } | |
4057 | ||
4058 | /* Reverse the order of elements in the chain T of blocks, | |
4059 | and return the new head of the chain (old last element). */ | |
4060 | ||
4061 | static tree | |
4062 | blocks_nreverse (t) | |
4063 | tree t; | |
4064 | { | |
4065 | register tree prev = 0, decl, next; | |
4066 | for (decl = t; decl; decl = next) | |
4067 | { | |
4068 | next = BLOCK_CHAIN (decl); | |
4069 | BLOCK_CHAIN (decl) = prev; | |
4070 | prev = decl; | |
4071 | } | |
4072 | return prev; | |
4073 | } | |
4074 | ||
4075 | /* Count the subblocks of BLOCK, and list them all into the vector VECTOR. | |
4076 | Also clear TREE_ASM_WRITTEN in all blocks. */ | |
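| /* E.g., a block with two leaf subblocks yields n_blocks == 3, with | |
| VECTOR holding the block itself followed by the two subblocks. */ | |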
4077 | ||
4078 | static int | |
4079 | all_blocks (block, vector) | |
4080 | tree block; | |
4081 | tree *vector; | |
4082 | { | |
4083 | int n_blocks = 1; | |
4084 | tree subblocks; | |
4085 | ||
4086 | TREE_ASM_WRITTEN (block) = 0; | |
4087 | /* Record this block. */ | |
4088 | if (vector) | |
4089 | vector[0] = block; | |
4090 | ||
4091 | /* Record the subblocks, and their subblocks. */ | |
4092 | for (subblocks = BLOCK_SUBBLOCKS (block); | |
4093 | subblocks; subblocks = BLOCK_CHAIN (subblocks)) | |
4094 | n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0); | |
4095 | ||
4096 | return n_blocks; | |
4097 | } | |
4098 | \f | |
4099 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) | |
4100 | and initialize static variables for generating RTL for the statements | |
4101 | of the function. */ | |
4102 | ||
4103 | void | |
4104 | init_function_start (subr, filename, line) | |
4105 | tree subr; | |
4106 | char *filename; | |
4107 | int line; | |
4108 | { | |
4109 | char *junk; | |
4110 | ||
4111 | init_stmt_for_function (); | |
4112 | ||
4113 | cse_not_expected = ! optimize; | |
4114 | ||
4115 | /* Caller save not needed yet. */ | |
4116 | caller_save_needed = 0; | |
4117 | ||
4118 | /* No stack slots have been made yet. */ | |
4119 | stack_slot_list = 0; | |
4120 | ||
4121 | /* There is no stack slot for handling nonlocal gotos. */ | |
4122 | nonlocal_goto_handler_slot = 0; | |
4123 | nonlocal_goto_stack_level = 0; | |
4124 | ||
4125 | /* No labels have been declared for nonlocal use. */ | |
4126 | nonlocal_labels = 0; | |
4127 | ||
4128 | /* No function calls so far in this function. */ | |
4129 | function_call_count = 0; | |
4130 | ||
4131 | /* No parm regs have been allocated. | |
4132 | (This is important for output_inline_function.) */ | |
4133 | max_parm_reg = LAST_VIRTUAL_REGISTER + 1; | |
4134 | ||
4135 | /* Initialize the RTL mechanism. */ | |
4136 | init_emit (); | |
4137 | ||
4138 | /* Initialize the queue of pending postincrement and postdecrements, | |
4139 | and some other info in expr.c. */ | |
4140 | init_expr (); | |
4141 | ||
4142 | /* We haven't done register allocation yet. */ | |
4143 | reg_renumber = 0; | |
4144 | ||
4145 | init_const_rtx_hash_table (); | |
4146 | ||
4147 | current_function_name = (*decl_printable_name) (subr, &junk); | |
4148 | ||
4149 | /* Nonzero if this is a nested function that uses a static chain. */ | |
4150 | ||
4151 | current_function_needs_context | |
4152 | = (decl_function_context (current_function_decl) != 0); | |
4153 | ||
4154 | /* Set if a call to setjmp is seen. */ | |
4155 | current_function_calls_setjmp = 0; | |
4156 | ||
4157 | /* Set if a call to longjmp is seen. */ | |
4158 | current_function_calls_longjmp = 0; | |
4159 | ||
4160 | current_function_calls_alloca = 0; | |
4161 | current_function_has_nonlocal_label = 0; | |
4162 | current_function_contains_functions = 0; | |
4163 | ||
4164 | current_function_returns_pcc_struct = 0; | |
4165 | current_function_returns_struct = 0; | |
4166 | current_function_epilogue_delay_list = 0; | |
4167 | current_function_uses_const_pool = 0; | |
4168 | current_function_uses_pic_offset_table = 0; | |
4169 | ||
4170 | /* We have not yet needed to make a label to jump to for tail-recursion. */ | |
4171 | tail_recursion_label = 0; | |
4172 | ||
4173 | /* We haven't had a need to make a save area for ap yet. */ | |
4174 | ||
4175 | arg_pointer_save_area = 0; | |
4176 | ||
4177 | /* No stack slots allocated yet. */ | |
4178 | frame_offset = 0; | |
4179 | ||
4180 | /* No SAVE_EXPRs in this function yet. */ | |
4181 | save_expr_regs = 0; | |
4182 | ||
4183 | /* No RTL_EXPRs in this function yet. */ | |
4184 | rtl_expr_chain = 0; | |
4185 | ||
4186 | /* We have not allocated any temporaries yet. */ | |
4187 | temp_slots = 0; | |
4188 | temp_slot_level = 0; | |
4189 | ||
4190 | /* Within the function body, compute a type's size as soon as it is laid out. */ | |
4191 | immediate_size_expand++; | |
4192 | ||
4193 | init_pending_stack_adjust (); | |
4194 | inhibit_defer_pop = 0; | |
4195 | ||
4196 | current_function_outgoing_args_size = 0; | |
4197 | ||
4198 | /* Initialize the insn lengths. */ | |
4199 | init_insn_lengths (); | |
4200 | ||
4201 | /* Prevent ever trying to delete the first instruction of a function. | |
4202 | Also tell final how to output a linenum before the function prologue. */ | |
4203 | emit_line_note (filename, line); | |
4204 | ||
4205 | /* Make sure first insn is a note even if we don't want linenums. | |
4206 | This makes sure the first insn will never be deleted. | |
4207 | Also, final expects a note to appear there. */ | |
4208 | emit_note (NULL_PTR, NOTE_INSN_DELETED); | |
4209 | ||
4210 | /* Set flags used by final.c. */ | |
4211 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4212 | { | |
4213 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4214 | current_function_returns_pcc_struct = 1; | |
4215 | #endif | |
4216 | current_function_returns_struct = 1; | |
4217 | } | |
4218 | ||
4219 | /* Warn if this value is an aggregate type, | |
4220 | regardless of which calling convention we are using for it. */ | |
4221 | if (warn_aggregate_return | |
4222 | && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE | |
4223 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE | |
4224 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE | |
4225 | || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE)) | |
4226 | warning ("function returns an aggregate"); | |
4227 | ||
4228 | current_function_returns_pointer | |
4229 | = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE); | |
4230 | ||
4231 | /* Indicate that we need to distinguish between the return value of the | |
4232 | present function and the return value of a function being called. */ | |
4233 | rtx_equal_function_value_matters = 1; | |
4234 | ||
4235 | /* Indicate that we have not instantiated virtual registers yet. */ | |
4236 | virtuals_instantiated = 0; | |
4237 | ||
4238 | /* Indicate we have no need of a frame pointer yet. */ | |
4239 | frame_pointer_needed = 0; | |
4240 | ||
4241 | /* By default assume not varargs. */ | |
4242 | current_function_varargs = 0; | |
4243 | } | |
4244 | ||
4245 | /* Indicate that the current function uses extra args | |
4246 | not explicitly mentioned in the argument list in any fashion. */ | |
4247 | ||
4248 | void | |
4249 | mark_varargs () | |
4250 | { | |
4251 | current_function_varargs = 1; | |
4252 | } | |
4253 | ||
4254 | /* Expand a call to __main at the beginning of a possible main function. */ | |
4255 | ||
4256 | void | |
4257 | expand_main_function () | |
4258 | { | |
4259 | #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main) | |
4260 | emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__main"), 0, | |
4261 | VOIDmode, 0); | |
4262 | #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */ | |
4263 | } | |
4264 | \f | |
4265 | /* Start the RTL for a new function, and set variables used for | |
4266 | emitting RTL. | |
4267 | SUBR is the FUNCTION_DECL node. | |
4268 | PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with | |
4269 | the function's parameters, which must be run at any return statement. */ | |
4270 | ||
4271 | void | |
4272 | expand_function_start (subr, parms_have_cleanups) | |
4273 | tree subr; | |
4274 | int parms_have_cleanups; | |
4275 | { | |
4276 | register int i; | |
4277 | tree tem; | |
4278 | rtx last_ptr; | |
4279 | ||
4280 | /* Make sure volatile mem refs aren't considered | |
4281 | valid operands of arithmetic insns. */ | |
4282 | init_recog_no_volatile (); | |
4283 | ||
4284 | /* If function gets a static chain arg, store it in the stack frame. | |
4285 | Do this first, so it gets the first stack slot offset. */ | |
4286 | if (current_function_needs_context) | |
4287 | { | |
4288 | last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
4289 | emit_move_insn (last_ptr, static_chain_incoming_rtx); | |
4290 | } | |
4291 | ||
4292 | /* If the parameters of this function need cleaning up, get a label | |
4293 | for the beginning of the code which executes those cleanups. This must | |
4294 | be done before doing anything with return_label. */ | |
4295 | if (parms_have_cleanups) | |
4296 | cleanup_label = gen_label_rtx (); | |
4297 | else | |
4298 | cleanup_label = 0; | |
4299 | ||
4300 | /* Make the label for return statements to jump to, if this machine | |
4301 | does not have a one-instruction return and uses an epilogue, | |
4302 | or if it returns a structure, or if it has parm cleanups. */ | |
4303 | #ifdef HAVE_return | |
4304 | if (cleanup_label == 0 && HAVE_return | |
4305 | && ! current_function_returns_pcc_struct | |
4306 | && ! (current_function_returns_struct && ! optimize)) | |
4307 | return_label = 0; | |
4308 | else | |
4309 | return_label = gen_label_rtx (); | |
4310 | #else | |
4311 | return_label = gen_label_rtx (); | |
4312 | #endif | |
4313 | ||
4314 | /* Initialize rtx used to return the value. */ | |
4315 | /* Do this before assign_parms so that we copy the struct value address | |
4316 | before any library calls that assign parms might generate. */ | |
4317 | ||
4318 | /* Decide whether to return the value in memory or in a register. */ | |
4319 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4320 | { | |
4321 | /* Returning something that won't go in a register. */ | |
4322 | register rtx value_address = 0; | |
4323 | ||
4324 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4325 | if (current_function_returns_pcc_struct) | |
4326 | { | |
4327 | int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr))); | |
4328 | value_address = assemble_static_space (size); | |
4329 | } | |
4330 | else | |
4331 | #endif | |
4332 | { | |
4333 | /* Expect to be passed the address of a place to store the value. | |
4334 | If it is passed as an argument, assign_parms will take care of | |
4335 | it. */ | |
4336 | if (struct_value_incoming_rtx) | |
4337 | { | |
4338 | value_address = gen_reg_rtx (Pmode); | |
4339 | emit_move_insn (value_address, struct_value_incoming_rtx); | |
4340 | } | |
4341 | } | |
4342 | if (value_address) | |
4343 | DECL_RTL (DECL_RESULT (subr)) | |
4344 | = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), | |
4345 | value_address); | |
4346 | } | |
4347 | else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode) | |
4348 | /* If return mode is void, this decl rtl should not be used. */ | |
4349 | DECL_RTL (DECL_RESULT (subr)) = 0; | |
4350 | else if (parms_have_cleanups) | |
4351 | { | |
4352 | /* If function will end with cleanup code for parms, | |
4353 | compute the return value into a pseudo reg, | |
4354 | which we will copy into the true return register | |
4355 | after the cleanups are done. */ | |
4356 | ||
4357 | enum machine_mode mode = DECL_MODE (DECL_RESULT (subr)); | |
4358 | #ifdef PROMOTE_FUNCTION_RETURN | |
4359 | tree type = TREE_TYPE (DECL_RESULT (subr)); | |
4360 | int unsignedp = TREE_UNSIGNED (type); | |
4361 | ||
4362 | if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE | |
4363 | || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE | |
4364 | || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE | |
4365 | || TREE_CODE (type) == OFFSET_TYPE) | |
4366 | { | |
4367 | PROMOTE_MODE (mode, unsignedp, type); | |
4368 | } | |
4369 | #endif | |
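/* As an illustrative sketch (a hypothetical 32-bit target, not any
   particular machine description), PROMOTE_MODE might be defined in
   tm.h along these lines:

       #define PROMOTE_MODE(MODE, UNSIGNEDP, TYPE)          \
         if (GET_MODE_CLASS (MODE) == MODE_INT              \
             && GET_MODE_SIZE (MODE) < UNITS_PER_WORD)      \
           (MODE) = word_mode;

   With such a definition, a `short' or `char' result is computed below
   in a full-word pseudo, so the copy into the hard return register
   made after the cleanups uses the promoted mode.  */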
4370 | ||
4371 | DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode); | |
4372 | } | |
4373 | else | |
4374 | /* Scalar, returned in a register. */ | |
4375 | { | |
4376 | #ifdef FUNCTION_OUTGOING_VALUE | |
4377 | DECL_RTL (DECL_RESULT (subr)) | |
4378 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4379 | #else | |
4380 | DECL_RTL (DECL_RESULT (subr)) | |
4381 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4382 | #endif | |
4383 | ||
4384 | /* Mark this reg as the function's return value. */ | |
4385 | if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG) | |
4386 | { | |
4387 | REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1; | |
4388 | /* Needed because we may need to move this to memory | |
4389 | in case it's a named return value whose address is taken. */ | |
4390 | DECL_REGISTER (DECL_RESULT (subr)) = 1; | |
4391 | } | |
4392 | } | |
4393 | ||
4394 | /* Initialize rtx for parameters and local variables. | |
4395 | In some cases this requires emitting insns. */ | |
4396 | ||
4397 | assign_parms (subr, 0); | |
4398 | ||
4399 | /* The following was moved from init_function_start. | |
4400 | The move is supposed to make sdb output more accurate. */ | |
4401 | /* Indicate the beginning of the function body, | |
4402 | as opposed to parm setup. */ | |
4403 | emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG); | |
4404 | ||
4405 | /* If doing stupid allocation, mark parms as born here. */ | |
4406 | ||
4407 | if (GET_CODE (get_last_insn ()) != NOTE) | |
4408 | emit_note (NULL_PTR, NOTE_INSN_DELETED); | |
4409 | parm_birth_insn = get_last_insn (); | |
4410 | ||
4411 | if (obey_regdecls) | |
4412 | { | |
4413 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++) | |
4414 | use_variable (regno_reg_rtx[i]); | |
4415 | ||
4416 | if (current_function_internal_arg_pointer != virtual_incoming_args_rtx) | |
4417 | use_variable (current_function_internal_arg_pointer); | |
4418 | } | |
4419 | ||
4420 | /* Fetch static chain values for containing functions. */ | |
4421 | tem = decl_function_context (current_function_decl); | |
4422 | /* If not doing stupid register allocation, then start off with the static | |
4423 | chain pointer in a pseudo register. Otherwise, we use the stack | |
4424 | address that was generated above. */ | |
4425 | if (tem && ! obey_regdecls) | |
4426 | last_ptr = copy_to_reg (static_chain_incoming_rtx); | |
4427 | context_display = 0; | |
4428 | while (tem) | |
4429 | { | |
4430 | tree rtlexp = make_node (RTL_EXPR); | |
4431 | ||
4432 | RTL_EXPR_RTL (rtlexp) = last_ptr; | |
4433 | context_display = tree_cons (tem, rtlexp, context_display); | |
4434 | tem = decl_function_context (tem); | |
4435 | if (tem == 0) | |
4436 | break; | |
4437 | /* Chain thru stack frames, assuming pointer to next lexical frame | |
4438 | is found at the place we always store it. */ | |
4439 | #ifdef FRAME_GROWS_DOWNWARD | |
4440 | last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode)); | |
4441 | #endif | |
4442 | last_ptr = copy_to_reg (gen_rtx (MEM, Pmode, | |
4443 | memory_address (Pmode, last_ptr))); | |
4444 | } | |
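/* An illustrative GNU C example of why this walk is needed (where each
   frame keeps its static chain pointer is target-dependent):

       int outer (int x)
       {
         int middle (int y)
         {
           int inner (int z) { return x + y + z; }
           return inner (1);
         }
         return middle (2);
       }

   When `inner' is compiled, the loop above records one RTL_EXPR in
   context_display for `middle' and one for `outer', each holding an
   rtx for the pointer to that containing function's frame, obtained by
   following the chain of saved static-chain pointers one level at a
   time.  */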
4445 | ||
4446 | /* After the display initializations is where the tail-recursion label | |
4447 | should go, if we end up needing one. Ensure we have a NOTE here | |
4448 | since some things (like trampolines) get placed before this. */ | |
4449 | tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED); | |
4450 | ||
4451 | /* Evaluate now the sizes of any types declared among the arguments. */ | |
4452 | for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem)) | |
4453 | expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0); | |
4454 | ||
4455 | /* Make sure there is a line number after the function entry setup code. */ | |
4456 | force_next_line_note (); | |
4457 | } | |
4458 | \f | |
4459 | /* Generate RTL for the end of the current function. | |
4460 | FILENAME and LINE are the current position in the source file. */ | |
4461 | ||
4462 | /* It is up to language-specific callers to do cleanups for parameters. */ | |
4463 | ||
4464 | void | |
4465 | expand_function_end (filename, line) | |
4466 | char *filename; | |
4467 | int line; | |
4468 | { | |
4469 | register int i; | |
4470 | tree link; | |
4471 | ||
4472 | static rtx initial_trampoline; | |
4473 | ||
4474 | #ifdef NON_SAVING_SETJMP | |
4475 | /* Don't put any variables in registers if we call setjmp | |
4476 | on a machine that fails to restore the registers. */ | |
4477 | if (NON_SAVING_SETJMP && current_function_calls_setjmp) | |
4478 | { | |
4479 | setjmp_protect (DECL_INITIAL (current_function_decl)); | |
4480 | setjmp_protect_args (); | |
4481 | } | |
4482 | #endif | |
4483 | ||
4484 | /* Save the argument pointer if a save area was made for it. */ | |
4485 | if (arg_pointer_save_area) | |
4486 | { | |
4487 | rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx); | |
4488 | emit_insn_before (x, tail_recursion_reentry); | |
4489 | } | |
4490 | ||
4491 | /* Initialize any trampolines required by this function. */ | |
4492 | for (link = trampoline_list; link; link = TREE_CHAIN (link)) | |
4493 | { | |
4494 | tree function = TREE_PURPOSE (link); | |
4495 | rtx context = lookup_static_chain (function); | |
4496 | rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link)); | |
4497 | rtx seq; | |
4498 | ||
4499 | /* First make sure this compilation has a template for | |
4500 | initializing trampolines. */ | |
4501 | if (initial_trampoline == 0) | |
4502 | { | |
4503 | end_temporary_allocation (); | |
4504 | initial_trampoline | |
4505 | = gen_rtx (MEM, BLKmode, assemble_trampoline_template ()); | |
4506 | resume_temporary_allocation (); | |
4507 | } | |
4508 | ||
4509 | /* Generate insns to initialize the trampoline. */ | |
4510 | start_sequence (); | |
4511 | tramp = change_address (initial_trampoline, BLKmode, | |
4512 | round_trampoline_addr (XEXP (tramp, 0))); | |
4513 | emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE), | |
4514 | FUNCTION_BOUNDARY / BITS_PER_UNIT); | |
4515 | INITIALIZE_TRAMPOLINE (XEXP (tramp, 0), | |
4516 | XEXP (DECL_RTL (function), 0), context); | |
4517 | seq = get_insns (); | |
4518 | end_sequence (); | |
4519 | ||
4520 | /* Put those insns at entry to the containing function (this one). */ | |
4521 | emit_insns_before (seq, tail_recursion_reentry); | |
4522 | } | |
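/* For orientation, a sketch of what the machine-dependent macros used
   above typically supply (the real definitions live in each target's
   tm.h and vary widely):

       TRAMPOLINE_TEMPLATE    a short block of code, output once by
                              assemble_trampoline_template, that loads
                              the static chain register and jumps to a
                              fixed address;
       TRAMPOLINE_SIZE        the size in bytes of that block;
       INITIALIZE_TRAMPOLINE  emits insns that store CONTEXT and the
                              nested function's entry address into the
                              stack copy made by emit_block_move above.

   Taking the address of a nested function thus yields the address of
   this small stack-resident stub, which supplies the static chain
   before transferring to the real function.  */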
4523 | /* Clear the trampoline_list for the next function. */ | |
4524 | trampoline_list = 0; | |
4525 | ||
4526 | #if 0 /* I think unused parms are legitimate enough. */ | |
4527 | /* Warn about unused parms. */ | |
4528 | if (warn_unused) | |
4529 | { | |
4530 | rtx decl; | |
4531 | ||
4532 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
4533 | decl; decl = TREE_CHAIN (decl)) | |
4534 | if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL) | |
4535 | warning_with_decl (decl, "unused parameter `%s'"); | |
4536 | } | |
4537 | #endif | |
4538 | ||
4539 | /* Delete handlers for nonlocal gotos if nothing uses them. */ | |
4540 | if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label) | |
4541 | delete_handlers (); | |
4542 | ||
4543 | /* End any sequences that failed to be closed due to syntax errors. */ | |
4544 | while (in_sequence_p ()) | |
4545 | end_sequence (); | |
4546 | ||
4547 | /* Outside function body, can't compute type's actual size | |
4548 | until next function's body starts. */ | |
4549 | immediate_size_expand--; | |
4550 | ||
4551 | /* If doing stupid register allocation, | |
4552 | mark register parms as dying here. */ | |
4553 | ||
4554 | if (obey_regdecls) | |
4555 | { | |
4556 | rtx tem; | |
4557 | for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++) | |
4558 | use_variable (regno_reg_rtx[i]); | |
4559 | ||
4560 | /* Likewise for the regs of all the SAVE_EXPRs in the function. */ | |
4561 | ||
4562 | for (tem = save_expr_regs; tem; tem = XEXP (tem, 1)) | |
4563 | { | |
4564 | use_variable (XEXP (tem, 0)); | |
4565 | use_variable_after (XEXP (tem, 0), parm_birth_insn); | |
4566 | } | |
4567 | ||
4568 | if (current_function_internal_arg_pointer != virtual_incoming_args_rtx) | |
4569 | use_variable (current_function_internal_arg_pointer); | |
4570 | } | |
4571 | ||
4572 | clear_pending_stack_adjust (); | |
4573 | do_pending_stack_adjust (); | |
4574 | ||
4575 | /* Mark the end of the function body. | |
4576 | If control reaches this insn, the function can drop through | |
4577 | without returning a value. */ | |
4578 | emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END); | |
4579 | ||
4580 | /* Output a line number for the end of the function. | |
4581 | SDB depends on this. */ | |
4582 | emit_line_note_force (filename, line); | |
4583 | ||
4584 | /* Output the label for the actual return from the function, | |
4585 | if one is expected. This happens either because a function epilogue | |
4586 | is used instead of a return instruction, or because a return was done | |
4587 | with a goto in order to run local cleanups, or because of pcc-style | |
4588 | structure returning. */ | |
4589 | ||
4590 | if (return_label) | |
4591 | emit_label (return_label); | |
4592 | ||
4593 | /* If we had calls to alloca, and this machine needs | |
4594 | an accurate stack pointer to exit the function, | |
4595 | insert some code to save and restore the stack pointer. */ | |
4596 | #ifdef EXIT_IGNORE_STACK | |
4597 | if (! EXIT_IGNORE_STACK) | |
4598 | #endif | |
4599 | if (current_function_calls_alloca) | |
4600 | { | |
4601 | rtx tem = 0; | |
4602 | ||
4603 | emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn); | |
4604 | emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX); | |
4605 | } | |
4606 | ||
4607 | /* If scalar return value was computed in a pseudo-reg, | |
4608 | copy that to the hard return register. */ | |
4609 | if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0 | |
4610 | && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG | |
4611 | && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl))) | |
4612 | >= FIRST_PSEUDO_REGISTER)) | |
4613 | { | |
4614 | rtx real_decl_result; | |
4615 | ||
4616 | #ifdef FUNCTION_OUTGOING_VALUE | |
4617 | real_decl_result | |
4618 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
4619 | current_function_decl); | |
4620 | #else | |
4621 | real_decl_result | |
4622 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
4623 | current_function_decl); | |
4624 | #endif | |
4625 | REG_FUNCTION_VALUE_P (real_decl_result) = 1; | |
4626 | emit_move_insn (real_decl_result, | |
4627 | DECL_RTL (DECL_RESULT (current_function_decl))); | |
4628 | emit_insn (gen_rtx (USE, VOIDmode, real_decl_result)); | |
4629 | } | |
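/* A concrete (hypothetical) example: on a target whose FUNCTION_VALUE
   for SImode is hard register 0, and whose result was computed into
   pseudo register 105, the block above emits

       (set (reg:SI 0) (reg:SI 105))
       (use (reg:SI 0))

   The USE keeps later passes from treating the copy into the hard
   return register as a dead store.  */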
4630 | ||
4631 | /* If returning a structure, arrange to return the address of the value | |
4632 | in a place where debuggers expect to find it. | |
4633 | ||
4634 | If returning a structure PCC style, | |
4635 | the caller also depends on this value. | |
4636 | And current_function_returns_pcc_struct is not necessarily set. */ | |
4637 | if (current_function_returns_struct | |
4638 | || current_function_returns_pcc_struct) | |
4639 | { | |
4640 | rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
4641 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); | |
4642 | #ifdef FUNCTION_OUTGOING_VALUE | |
4643 | rtx outgoing | |
4644 | = FUNCTION_OUTGOING_VALUE (build_pointer_type (type), | |
4645 | current_function_decl); | |
4646 | #else | |
4647 | rtx outgoing | |
4648 | = FUNCTION_VALUE (build_pointer_type (type), | |
4649 | current_function_decl); | |
4650 | #endif | |
4651 | ||
4652 | /* Mark this as a function return value so integrate will delete the | |
4653 | assignment and USE below when inlining this function. */ | |
4654 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
4655 | ||
4656 | emit_move_insn (outgoing, value_address); | |
4657 | use_variable (outgoing); | |
4658 | } | |
4659 | ||
4660 | /* Output a return insn if we are using one. | |
4661 | Otherwise, let the rtl chain end here, to drop through | |
4662 | into the epilogue. */ | |
4663 | ||
4664 | #ifdef HAVE_return | |
4665 | if (HAVE_return) | |
4666 | { | |
4667 | emit_jump_insn (gen_return ()); | |
4668 | emit_barrier (); | |
4669 | } | |
4670 | #endif | |
4671 | ||
4672 | /* Fix up any gotos that jumped out to the outermost | |
4673 | binding level of the function. | |
4674 | Must follow emitting RETURN_LABEL. */ | |
4675 | ||
4676 | /* If you have any cleanups to do at this point, | |
4677 | and they need to create temporary variables, | |
4678 | then you will lose. */ | |
4679 | fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0); | |
4680 | } | |
4681 | \f | |
4682 | /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */ | |
4683 | ||
4684 | static int *prologue; | |
4685 | static int *epilogue; | |
4686 | ||
4687 | /* Create an array that records the INSN_UIDs of INSNS (either a sequence | |
4688 | or a single insn). */ | |
4689 | ||
4690 | static int * | |
4691 | record_insns (insns) | |
4692 | rtx insns; | |
4693 | { | |
4694 | int *vec; | |
4695 | ||
4696 | if (GET_CODE (insns) == SEQUENCE) | |
4697 | { | |
4698 | int len = XVECLEN (insns, 0); | |
4699 | vec = (int *) oballoc ((len + 1) * sizeof (int)); | |
4700 | vec[len] = 0; | |
4701 | while (--len >= 0) | |
4702 | vec[len] = INSN_UID (XVECEXP (insns, 0, len)); | |
4703 | } | |
4704 | else | |
4705 | { | |
4706 | vec = (int *) oballoc (2 * sizeof (int)); | |
4707 | vec[0] = INSN_UID (insns); | |
4708 | vec[1] = 0; | |
4709 | } | |
4710 | return vec; | |
4711 | } | |
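/* For example, recording a SEQUENCE of three insns whose UIDs are 37,
   38 and 41 yields the vector {37, 38, 41, 0}; the trailing zero is
   the terminator that `contains', below, scans for.  */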
4712 | ||
4713 | /* Return how many insns within INSN (a single insn or a SEQUENCE) have INSN_UIDs listed in VEC. */ | |
4714 | ||
4715 | static int | |
4716 | contains (insn, vec) | |
4717 | rtx insn; | |
4718 | int *vec; | |
4719 | { | |
4720 | register int i, j; | |
4721 | ||
4722 | if (GET_CODE (insn) == INSN | |
4723 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
4724 | { | |
4725 | int count = 0; | |
4726 | for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) | |
4727 | for (j = 0; vec[j]; j++) | |
4728 | if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j]) | |
4729 | count++; | |
4730 | return count; | |
4731 | } | |
4732 | else | |
4733 | { | |
4734 | for (j = 0; vec[j]; j++) | |
4735 | if (INSN_UID (insn) == vec[j]) | |
4736 | return 1; | |
4737 | } | |
4738 | return 0; | |
4739 | } | |
4740 | ||
4741 | /* Generate the prologue and epilogue RTL if the machine supports it. Thread | |
4742 | this into place with notes indicating where the prologue ends and where | |
4743 | the epilogue begins. Update the basic block information when possible. */ | |
4744 | ||
4745 | void | |
4746 | thread_prologue_and_epilogue_insns (f) | |
4747 | rtx f; | |
4748 | { | |
4749 | #ifdef HAVE_prologue | |
4750 | if (HAVE_prologue) | |
4751 | { | |
4752 | rtx head, seq, insn; | |
4753 | ||
4754 | /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more | |
4755 | prologue insns and a NOTE_INSN_PROLOGUE_END. */ | |
4756 | emit_note_after (NOTE_INSN_PROLOGUE_END, f); | |
4757 | seq = gen_prologue (); | |
4758 | head = emit_insn_after (seq, f); | |
4759 | ||
4760 | /* Include the new prologue insns in the first block. Ignore them | |
4761 | if they form a basic block unto themselves. */ | |
4762 | if (basic_block_head && n_basic_blocks | |
4763 | && GET_CODE (basic_block_head[0]) != CODE_LABEL) | |
4764 | basic_block_head[0] = NEXT_INSN (f); | |
4765 | ||
4766 | /* Retain a map of the prologue insns. */ | |
4767 | prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head); | |
4768 | } | |
4769 | else | |
4770 | #endif | |
4771 | prologue = 0; | |
4772 | ||
4773 | #ifdef HAVE_epilogue | |
4774 | if (HAVE_epilogue) | |
4775 | { | |
4776 | rtx insn = get_last_insn (); | |
4777 | rtx prev = prev_nonnote_insn (insn); | |
4778 | ||
4779 | /* If we end with a BARRIER, we don't need an epilogue. */ | |
4780 | if (! (prev && GET_CODE (prev) == BARRIER)) | |
4781 | { | |
4782 | rtx tail, seq; | |
4783 | ||
4784 | /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, | |
4785 | the epilogue insns (this must include the jump insn that | |
4786 | returns), USE insns at the end of a function, and a BARRIER. */ | |
4787 | ||
4788 | emit_barrier_after (insn); | |
4789 | ||
4790 | /* Place the epilogue before the USE insns at the end of a | |
4791 | function. */ | |
4792 | while (prev | |
4793 | && GET_CODE (prev) == INSN | |
4794 | && GET_CODE (PATTERN (prev)) == USE) | |
4795 | { | |
4796 | insn = PREV_INSN (prev); | |
4797 | prev = prev_nonnote_insn (prev); | |
4798 | } | |
4799 | ||
4800 | seq = gen_epilogue (); | |
4801 | tail = emit_jump_insn_after (seq, insn); | |
4802 | emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn); | |
4803 | ||
4804 | /* Include the new epilogue insns in the last block. Ignore | |
4805 | them if they form a basic block unto themselves. */ | |
4806 | if (basic_block_end && n_basic_blocks | |
4807 | && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN) | |
4808 | basic_block_end[n_basic_blocks - 1] = tail; | |
4809 | ||
4810 | /* Retain a map of the epilogue insns. */ | |
4811 | epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail); | |
4812 | return; | |
4813 | } | |
4814 | } | |
4815 | #endif | |
4816 | epilogue = 0; | |
4817 | } | |
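/* Schematically, a function for which both gen_prologue and
   gen_epilogue produced code now looks like

       NOTE_INSN_DELETED            -- the first insn, F
       (prologue insns)
       NOTE_INSN_PROLOGUE_END
       ... function body ...
       NOTE_INSN_EPILOGUE_BEG
       (epilogue insns, ending in the return jump)
       (any USE insns at the end of the function)
       BARRIER

   reposition_prologue_and_epilogue_notes, below, moves the two notes
   back next to the insns they mark if scheduling or reorg rearranges
   things later.  */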
4818 | ||
4819 | /* Reposition the prologue-end and epilogue-begin notes after instruction | |
4820 | scheduling and delayed branch scheduling. */ | |
4821 | ||
4822 | void | |
4823 | reposition_prologue_and_epilogue_notes (f) | |
4824 | rtx f; | |
4825 | { | |
4826 | #if defined (HAVE_prologue) || defined (HAVE_epilogue) | |
4827 | /* Reposition the prologue and epilogue notes. */ | |
4828 | if (n_basic_blocks) | |
4829 | { | |
4830 | rtx next, prev; | |
4831 | int len; | |
4832 | ||
4833 | if (prologue) | |
4834 | { | |
4835 | register rtx insn, note = 0; | |
4836 | ||
4837 | /* Scan from the beginning until we reach the last prologue insn. | |
4838 | We apparently can't depend on basic_block_{head,end} after | |
4839 | reorg has run. */ | |
4840 | for (len = 0; prologue[len]; len++) | |
4841 | ; | |
4842 | for (insn = f; len && insn; insn = NEXT_INSN (insn)) | |
4843 | { | |
4844 | if (GET_CODE (insn) == NOTE) | |
4845 | { | |
4846 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END) | |
4847 | note = insn; | |
4848 | } | |
4849 | else if ((len -= contains (insn, prologue)) == 0) | |
4850 | { | |
4851 | /* Find the prologue-end note if we haven't already, and | |
4852 | move it to just after the last prologue insn. */ | |
4853 | if (note == 0) | |
4854 | { | |
4855 | for (note = insn; note = NEXT_INSN (note);) | |
4856 | if (GET_CODE (note) == NOTE | |
4857 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END) | |
4858 | break; | |
4859 | } | |
4860 | next = NEXT_INSN (note); | |
4861 | prev = PREV_INSN (note); | |
4862 | if (prev) | |
4863 | NEXT_INSN (prev) = next; | |
4864 | if (next) | |
4865 | PREV_INSN (next) = prev; | |
4866 | add_insn_after (note, insn); | |
4867 | } | |
4868 | } | |
4869 | } | |
4870 | ||
4871 | if (epilogue) | |
4872 | { | |
4873 | register rtx insn, note = 0; | |
4874 | ||
4875 | /* Scan from the end until we reach the first epilogue insn. | |
4876 | We apparently can't depend on basic_block_{head,end} after | |
4877 | reorg has run. */ | |
4878 | for (len = 0; epilogue[len]; len++) | |
4879 | ; | |
4880 | for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn)) | |
4881 | { | |
4882 | if (GET_CODE (insn) == NOTE) | |
4883 | { | |
4884 | if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG) | |
4885 | note = insn; | |
4886 | } | |
4887 | else if ((len -= contains (insn, epilogue)) == 0) | |
4888 | { | |
4889 | /* Find the epilogue-begin note if we haven't already, and | |
4890 | move it to just before the first epilogue insn. */ | |
4891 | if (note == 0) | |
4892 | { | |
4893 | for (note = insn; note = PREV_INSN (note);) | |
4894 | if (GET_CODE (note) == NOTE | |
4895 | && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG) | |
4896 | break; | |
4897 | } | |
4898 | next = NEXT_INSN (note); | |
4899 | prev = PREV_INSN (note); | |
4900 | if (prev) | |
4901 | NEXT_INSN (prev) = next; | |
4902 | if (next) | |
4903 | PREV_INSN (next) = prev; | |
4904 | add_insn_after (note, PREV_INSN (insn)); | |
4905 | } | |
4906 | } | |
4907 | } | |
4908 | } | |
4909 | #endif /* HAVE_prologue or HAVE_epilogue */ | |
4910 | } |