]>
Commit | Line | Data |
---|---|---|
12670d88 | 1 | /* Implements exception handling. |
4956d07c MS |
2 | Copyright (C) 1989, 92-95, 1996 Free Software Foundation, Inc. |
3 | Contributed by Mike Stump <mrs@cygnus.com>. | |
4 | ||
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | ||
12670d88 RK |
23 | /* An exception is an event that can be signaled from within a |
24 | function. This event can then be "caught" or "trapped" by the | |
25 | callers of this function. This potentially allows program flow to | |
26 | be transferred to any arbitrary code associated with a function call | 
27 | several levels up the stack. | |
28 | ||
29 | The intended use for this mechanism is for signaling "exceptional | |
30 | events" in an out-of-band fashion, hence its name. The C++ language | |
31 | (and many other OO-styled or functional languages) practically | |
32 | requires such a mechanism, as otherwise it becomes very difficult | |
33 | or even impossible to signal failure conditions in complex | |
34 | situations. The traditional C++ example is when an error occurs in | |
35 | the process of constructing an object; without such a mechanism, it | |
36 | is impossible to signal that the error occurs without adding global | |
37 | state variables and error checks around every object construction. | |
38 | ||
39 | The act of causing this event to occur is referred to as "throwing | |
40 | an exception". (Alternate terms include "raising an exception" or | |
41 | "signaling an exception".) The term "throw" is used because control | |
42 | is returned to the callers of the function that is signaling the | |
43 | exception, and thus there is the concept of "throwing" the | |
44 | exception up the call stack. | |
45 | ||
46 | It is appropriate to speak of the "context of a throw". This | |
47 | context refers to the address where the exception is thrown from, | |
48 | and is used to determine which exception region will handle the | |
49 | exception. | |
50 | ||
51 | Regions of code within a function can be marked such that if it | |
52 | contains the context of a throw, control will be passed to a | |
53 | designated "exception handler". These areas are known as "exception | |
54 | regions". Exception regions cannot overlap, but they can be nested | |
55 | to any arbitrary depth. Also, exception regions cannot cross | |
56 | function boundaries. | |
57 | ||
2ed18e63 MS |
58 | Exception handlers can either be specified by the user (which we |
59 | will call a "user-defined handler") or generated by the compiler | |
60 | (which we will designate as a "cleanup"). Cleanups are used to | |
61 | perform tasks such as destruction of objects allocated on the | |
62 | stack. | |
63 | ||
64 | In the current implementation, cleanups are handled by allocating an | 
65 | exception region for the area that the cleanup is designated for, | |
66 | and the handler for the region performs the cleanup and then | |
67 | rethrows the exception to the outer exception region. From the | |
68 | standpoint of the current implementation, there is little | |
69 | distinction made between a cleanup and a user-defined handler, and | |
70 | the phrase "exception handler" can be used to refer to either one | |
71 | equally well. (The section "Future Directions" below discusses how | |
72 | this will change). | |
73 | ||
74 | Each object file that is compiled with exception handling contains | |
75 | a static array of exception handlers named __EXCEPTION_TABLE__. | |
76 | Each entry contains the starting and ending addresses of the | |
77 | exception region, and the address of the handler designated for | |
78 | that region. | |
12670d88 RK |
79 | |
80 | At program startup each object file invokes a function named | |
81 | __register_exceptions with the address of its local | |
82 | __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, | |
83 | and is responsible for recording all of the exception regions into | |
84 | one list (which is kept in a static variable named exception_table_list). | |
85 | ||
86 | The function __throw () is actually responsible for doing the | |
87 | throw. In the C++ frontend, __throw () is generated on a | |
88 | per-object-file basis for each source file compiled with | |
2ed18e63 MS |
89 | -fexceptions. Before __throw () is invoked, the current context |
90 | of the throw needs to be placed in the global variable __eh_pc. | |
12670d88 RK |
91 | |
92 | __throw () attempts to find the appropriate exception handler for the | |
93 | PC value stored in __eh_pc by calling __find_first_exception_table_match | |
2ed18e63 MS |
94 | (which is defined in libgcc2.c). If __find_first_exception_table_match |
95 | finds a relevant handler, __throw jumps directly to it. | |
12670d88 | 96 | |
2ed18e63 | 97 | If a handler for the context being thrown from can't be found, |
12670d88 RK |
98 | __throw is responsible for unwinding the stack, determining the |
99 | address of the caller of the current function (which will be used | |
2ed18e63 MS |
100 | as the new context to throw from), and then restarting the process |
101 | of searching for a handler for the new context. __throw may also | |
102 | call abort () if it is unable to unwind the stack, and can also | |
103 | call an external library function named __terminate if it reaches | |
104 | the top of the stack without finding an appropriate handler. (By | |
105 | default __terminate () invokes abort (), but this behavior can be | |
106 | changed by the user to perform some sort of cleanup behavior before | |
107 | exiting). | |
12670d88 RK |
108 | |
109 | Internal implementation details: | |
110 | ||
12670d88 RK |
111 | To associate a user-defined handler with a block of statements, the |
112 | function expand_start_try_stmts () is used to mark the start of the | |
113 | block of statements with which the handler is to be associated | |
2ed18e63 MS |
114 | (which is known as a "try block"). All statements that appear |
115 | afterwards will be associated with the try block. | |
116 | ||
117 | A call to expand_start_all_catch () marks the end of the try block, | |
118 | and also marks the start of the "catch block" (the user-defined | |
119 | handler) associated with the try block. | |
120 | ||
121 | This user-defined handler will be invoked for *every* exception | |
122 | thrown with the context of the try block. It is up to the handler | |
123 | to decide whether or not it wishes to handle any given exception, | |
124 | as there is currently no mechanism in this implementation for doing | |
125 | this. (There are plans for conditionally processing an exception | |
126 | based on its "type", which will provide a language-independent | |
127 | mechanism). | |
128 | ||
129 | If the handler chooses not to process the exception (perhaps by | |
130 | looking at an "exception type" or some other additional data | |
131 | supplied with the exception), it can fall through to the end of the | |
132 | handler. expand_end_all_catch () and expand_leftover_cleanups () | |
133 | add additional code to the end of each handler to take care of | |
134 | rethrowing to the outer exception handler. | |
135 | ||
136 | The handler also has the option to continue with "normal flow of | |
137 | code", or in other words to resume executing at the statement | |
138 | immediately after the end of the exception region. The variable | |
139 | caught_return_label_stack contains a stack of labels, and jumping | |
140 | to the topmost entry's label via expand_goto () will resume normal | |
141 | flow to the statement immediately after the end of the exception | |
142 | region. If the handler falls through to the end, the exception will | |
143 | be rethrown to the outer exception region. | |
144 | ||
145 | The instructions for the catch block are kept as a separate | |
146 | sequence, and will be emitted at the end of the function along with | |
147 | the handlers specified via expand_eh_region_end (). The end of the | |
148 | catch block is marked with expand_end_all_catch (). | |
12670d88 RK |
149 | |
150 | Any data associated with the exception must currently be handled by | |
151 | some external mechanism maintained in the frontend. For example, | |
152 | the C++ exception mechanism passes an arbitrary value along with | |
153 | the exception, and this is handled in the C++ frontend by using a | |
2ed18e63 MS |
154 | global variable to hold the value. (This will be changing in the |
155 | future.) | |
156 | ||
157 | The mechanism in C++ for handling data associated with the | |
158 | exception is clearly not thread-safe. For a thread-based | |
159 | environment, another mechanism must be used (possibly using a | |
160 | per-thread allocation mechanism if the size of the area that needs | |
161 | to be allocated isn't known at compile time.) | |
162 | ||
163 | Internally-generated exception regions (cleanups) are marked by | |
164 | calling expand_eh_region_start () to mark the start of the region, | |
165 | and expand_eh_region_end (handler) is used to both designate the | |
166 | end of the region and to associate a specified handler/cleanup with | |
167 | the region. The rtl code in HANDLER will be invoked whenever an | |
168 | exception occurs in the region between the calls to | |
169 | expand_eh_region_start and expand_eh_region_end. After HANDLER is | |
170 | executed, additional code is emitted to handle rethrowing the | |
171 | exception to the outer exception handler. The code for HANDLER will | |
172 | be emitted at the end of the function. | |
12670d88 RK |
173 | |
174 | TARGET_EXPRs can also be used to designate exception regions. A | |
175 | TARGET_EXPR gives an unwind-protect style interface commonly used | |
176 | in functional languages such as LISP. The associated expression is | |
2ed18e63 MS |
177 | evaluated, and whether or not it (or any of the functions that it |
178 | calls) throws an exception, the protect expression is always | |
179 | invoked. This implementation takes care of the details of | |
180 | associating an exception table entry with the expression and | |
181 | generating the necessary code (it actually emits the protect | |
182 | expression twice, once for normal flow and once for the exception | |
183 | case). As for the other handlers, the code for the exception case | |
184 | will be emitted at the end of the function. | |
185 | ||
186 | Cleanups can also be specified by using add_partial_entry (handler) | |
187 | and end_protect_partials (). add_partial_entry creates the start of | |
188 | a new exception region; HANDLER will be invoked if an exception is | |
189 | thrown with the context of the region between the calls to | |
190 | add_partial_entry and end_protect_partials. end_protect_partials is | |
191 | used to mark the end of these regions. add_partial_entry can be | |
192 | called as many times as needed before calling end_protect_partials. | |
193 | However, end_protect_partials should only be invoked once for each | |
194 | group of calls to add_partial_entry () as the entries are queued | |
195 | and all of the outstanding entries are processed simultaneously | |
196 | when end_protect_partials is invoked. Similarly to the other | |
197 | handlers, the code for HANDLER will be emitted at the end of the | |
198 | function. | |
12670d88 RK |
199 | |
200 | The generated RTL for an exception region includes | |
201 | NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark | |
202 | the start and end of the exception region. A unique label is also | |
2ed18e63 MS |
203 | generated at the start of the exception region, which is available |
204 | by looking at the ehstack variable. The topmost entry corresponds | |
205 | to the current region. | |
12670d88 RK |
206 | |
207 | In the current implementation, an exception can only be thrown from | |
208 | a function call (since the mechanism used to actually throw an | |
209 | exception involves calling __throw). If an exception region is | |
210 | created but no function calls occur within that region, the region | |
2ed18e63 MS |
211 | can be safely optimized away (along with its exception handlers) |
212 | since no exceptions can ever be caught in that region. | |
12670d88 RK |
213 | |
214 | Unwinding the stack: | |
215 | ||
216 | The details of unwinding the stack to the next frame can be rather | |
217 | complex. While in many cases a generic __unwind_function () routine | |
218 | can be used by the generated exception handling code to do this, it | |
219 | is often necessary to generate inline code to do the unwinding. | |
220 | ||
221 | Whether or not these inlined unwinders are necessary is | |
222 | target-specific. | |
223 | ||
224 | By default, if the target-specific backend doesn't supply a | |
225 | definition for __unwind_function (), inlined unwinders will be used | |
226 | instead. The main tradeoff here is in text space utilization. | |
227 | Obviously, if inline unwinders have to be generated repeatedly, | |
2ed18e63 MS |
228 | this uses much more space than if a single routine is used. |
229 | ||
230 | However, it is simply not possible on some platforms to write a | |
231 | generalized routine for doing stack unwinding without having some | |
232 | form of additional data associated with each function. The current | |
233 | implementation encodes this data in the form of additional machine | |
234 | instructions. This is clearly not desirable, as it is extremely | |
235 | inefficient. The next implementation will provide a set of metadata | |
236 | for each function that will provide the needed information. | |
12670d88 RK |
237 | |
238 | The backend macro DOESNT_NEED_UNWINDER is used to conditionalize | |
239 | whether or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER | |
240 | is defined and has a non-zero value, a per-function unwinder is | |
241 | not emitted for the current function. | |
242 | ||
243 | On some platforms it is possible that neither __unwind_function () | |
244 | nor inlined unwinders are available. For these platforms it is not | |
245 | possible to throw through a function call, and abort () will be | |
2ed18e63 MS |
246 | invoked instead of performing the throw. |
247 | ||
248 | Future directions: | |
249 | ||
250 | Currently __throw () makes no differentiation between cleanups and | |
251 | user-defined exception regions. While this makes the implementation | |
252 | simple, it also implies that it is impossible to determine if a | |
253 | user-defined exception handler exists for a given exception without | |
254 | completely unwinding the stack in the process. This is undesirable | |
255 | from the standpoint of debugging, as ideally it would be possible | |
256 | to trap unhandled exceptions in the debugger before the process of | |
257 | unwinding has even started. | |
258 | ||
259 | This problem can be solved by marking user-defined handlers in a | |
260 | special way (probably by adding additional bits to exception_table_list). | |
261 | A two-pass scheme could then be used by __throw () to iterate | |
262 | through the table. The first pass would search for a relevant | |
263 | user-defined handler for the current context of the throw, and if | |
264 | one is found, the second pass would then invoke all needed cleanups | |
265 | before jumping to the user-defined handler. | |
266 | ||
267 | Many languages (including C++ and Ada) make execution of a | |
268 | user-defined handler conditional on the "type" of the exception | |
269 | thrown. (The type of the exception is actually the type of the data | |
270 | that is thrown with the exception.) It will thus be necessary for | |
271 | __throw () to be able to determine if a given user-defined | |
272 | exception handler will actually be executed, given the type of | |
273 | exception. | |
274 | ||
275 | One scheme is to add additional information to exception_table_list | |
276 | as to the types of exceptions accepted by each handler. __throw () | |
277 | can do the type comparisons and then determine if the handler is | |
278 | actually going to be executed. | |
279 | ||
280 | There is currently no significant level of debugging support | |
281 | available, other than to place a breakpoint on __throw (). While | |
282 | this is sufficient in most cases, it would be helpful to be able to | |
283 | know where a given exception was going to be thrown to before it is | |
284 | actually thrown, and to be able to choose between stopping before | |
285 | every exception region (including cleanups), or just user-defined | |
286 | exception regions. This should be possible to do in the two-pass | |
287 | scheme by adding additional labels to __throw () for appropriate | |
288 | breakpoints, and additional debugger commands could be added to | |
289 | query various state variables to determine what actions are to be | |
290 | performed next. | |
291 | ||
292 | Another major problem that is being worked on is the issue with | |
293 | stack unwinding on various platforms. Currently the only platform | |
294 | that has support for __unwind_function () is the Sparc; all other | |
295 | ports require per-function unwinders, which causes large amounts of | |
296 | code bloat. | |
297 | ||
298 | Ideally it would be possible to store a small set of metadata with | |
299 | each function that would then make it possible to write a | |
300 | __unwind_function () for every platform. This would eliminate the | |
301 | need for per-function unwinders. | |
302 | ||
303 | The main reason the data is needed is that on some platforms the | |
304 | order and types of data stored on the stack can vary depending on | |
305 | the type of function, its arguments and returned values, and the | |
306 | compilation options used (optimization versus non-optimization, | |
307 | -fomit-frame-pointer, processor variations, etc). | |
308 | ||
309 | Unfortunately, this also means that throwing through functions that | |
310 | aren't compiled with exception handling support will still not be | |
311 | possible on some platforms. This problem is currently being | |
312 | investigated, but no solutions have been found that do not imply | |
313 | some unacceptable performance penalties. */ | |
4956d07c MS |
314 | |
315 | ||
316 | #include "config.h" | |
317 | #include <stdio.h> | |
318 | #include "rtl.h" | |
319 | #include "tree.h" | |
320 | #include "flags.h" | |
321 | #include "except.h" | |
322 | #include "function.h" | |
323 | #include "insn-flags.h" | |
324 | #include "expr.h" | |
325 | #include "insn-codes.h" | |
326 | #include "regs.h" | |
327 | #include "hard-reg-set.h" | |
328 | #include "insn-config.h" | |
329 | #include "recog.h" | |
330 | #include "output.h" | |
12670d88 | 331 | #include "assert.h" |
4956d07c | 332 | |
12670d88 | 333 | /* A list of labels used for exception handlers. Created by |
4956d07c MS |
334 | find_exception_handler_labels for the optimization passes. */ |
335 | ||
336 | rtx exception_handler_labels; | |
337 | ||
12670d88 RK |
338 | /* Nonzero means that __throw was invoked. |
339 | ||
340 | This is used by the C++ frontend to know if code needs to be emitted | |
341 | for __throw or not. */ | |
4956d07c MS |
342 | |
343 | int throw_used; | |
344 | ||
345 | /* A stack used for keeping track of the currectly active exception | |
12670d88 | 346 | handling region. As each exception region is started, an entry |
4956d07c MS |
347 | describing the region is pushed onto this stack. The current |
348 | region can be found by looking at the top of the stack, and as we | |
12670d88 RK |
349 | exit regions, the corresponding entries are popped. |
350 | ||
351 | Entries cannot overlap; they must be nested. So there is only one | |
352 | entry at most that corresponds to the current instruction, and that | |
353 | is the entry on the top of the stack. */ | |
4956d07c MS |
354 | |
355 | struct eh_stack ehstack; | |
356 | ||
12670d88 RK |
357 | /* A queue used for tracking which exception regions have closed but |
358 | whose handlers have not yet been expanded. Regions are emitted in | |
359 | groups in an attempt to improve paging performance. | |
360 | ||
361 | As we exit a region, we enqueue a new entry. The entries are then | |
362 | dequeued during expand_leftover_cleanups () and expand_start_all_catch (), | |
363 | ||
364 | We should redo things so that we either take RTL for the handler, | |
365 | or we expand the handler expressed as a tree immediately at region | |
366 | end time. */ | |
4956d07c MS |
367 | |
368 | struct eh_queue ehqueue; | |
369 | ||
12670d88 RK |
370 | /* Insns for all of the exception handlers for the current function. |
371 | They are currently emitted by the frontend code. */ | |
4956d07c MS |
372 | |
373 | rtx catch_clauses; | |
374 | ||
12670d88 RK |
375 | /* A TREE_CHAINed list of handlers for regions that are not yet |
376 | closed. The TREE_VALUE of each entry contains the handler for the | |
377 | corresponding entry on the ehstack. */ | |
4956d07c | 378 | |
12670d88 | 379 | static tree protect_list; |
4956d07c MS |
380 | |
381 | /* Stacks to keep track of various labels. */ | |
382 | ||
12670d88 RK |
383 | /* Keeps track of the label to resume to should one want to resume |
384 | normal control flow out of a handler (instead of, say, returning to | |
385 | the caller of the current function or exiting the program). Also | |
386 | used as the context of a throw to rethrow an exception to the outer | |
387 | exception region. */ | |
4956d07c MS |
388 | |
389 | struct label_node *caught_return_label_stack = NULL; | |
390 | ||
12670d88 | 391 | /* A random data area for the front end's own use. */ |
4956d07c MS |
392 | |
393 | struct label_node *false_label_stack = NULL; | |
394 | ||
843e8335 | 395 | /* The rtx and the tree for the saved PC value. */ |
4956d07c MS |
396 | |
397 | rtx eh_saved_pc_rtx; | |
843e8335 | 398 | tree eh_saved_pc; |
4956d07c MS |
399 | |
400 | rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx)); | |
401 | \f | |
402 | /* Various support routines to manipulate the various data structures | |
403 | used by the exception handling code. */ | |
404 | ||
405 | /* Push a label entry onto the given STACK. */ | |
406 | ||
407 | void | |
408 | push_label_entry (stack, rlabel, tlabel) | |
409 | struct label_node **stack; | |
410 | rtx rlabel; | |
411 | tree tlabel; | |
412 | { | |
413 | struct label_node *newnode | |
414 | = (struct label_node *) xmalloc (sizeof (struct label_node)); | |
415 | ||
416 | if (rlabel) | |
417 | newnode->u.rlabel = rlabel; | |
418 | else | |
419 | newnode->u.tlabel = tlabel; | |
420 | newnode->chain = *stack; | |
421 | *stack = newnode; | |
422 | } | |
423 | ||
424 | /* Pop a label entry from the given STACK. */ | |
425 | ||
426 | rtx | |
427 | pop_label_entry (stack) | |
428 | struct label_node **stack; | |
429 | { | |
430 | rtx label; | |
431 | struct label_node *tempnode; | |
432 | ||
433 | if (! *stack) | |
434 | return NULL_RTX; | |
435 | ||
436 | tempnode = *stack; | |
437 | label = tempnode->u.rlabel; | |
438 | *stack = (*stack)->chain; | |
439 | free (tempnode); | |
440 | ||
441 | return label; | |
442 | } | |
443 | ||
444 | /* Return the top element of the given STACK. */ | |
445 | ||
446 | tree | |
447 | top_label_entry (stack) | |
448 | struct label_node **stack; | |
449 | { | |
450 | if (! *stack) | |
451 | return NULL_TREE; | |
452 | ||
453 | return (*stack)->u.tlabel; | |
454 | } | |
455 | ||
12670d88 | 456 | /* Make a copy of ENTRY using xmalloc to allocate the space. */ |
4956d07c MS |
457 | |
458 | static struct eh_entry * | |
459 | copy_eh_entry (entry) | |
460 | struct eh_entry *entry; | |
461 | { | |
462 | struct eh_entry *newentry; | |
463 | ||
464 | newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry)); | |
465 | bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry)); | |
466 | ||
467 | return newentry; | |
468 | } | |
469 | ||
12670d88 RK |
470 | /* Push a new eh_node entry onto STACK, and return the start label for |
471 | the entry. */ | |
4956d07c MS |
472 | |
473 | static rtx | |
474 | push_eh_entry (stack) | |
475 | struct eh_stack *stack; | |
476 | { | |
477 | struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node)); | |
478 | struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry)); | |
479 | ||
480 | entry->start_label = gen_label_rtx (); | |
481 | entry->end_label = gen_label_rtx (); | |
482 | entry->exception_handler_label = gen_label_rtx (); | |
483 | entry->finalization = NULL_TREE; | |
484 | ||
485 | node->entry = entry; | |
486 | node->chain = stack->top; | |
487 | stack->top = node; | |
488 | ||
489 | return entry->start_label; | |
490 | } | |
491 | ||
492 | /* Pop an entry from the given STACK. */ | |
493 | ||
494 | static struct eh_entry * | |
495 | pop_eh_entry (stack) | |
496 | struct eh_stack *stack; | |
497 | { | |
498 | struct eh_node *tempnode; | |
499 | struct eh_entry *tempentry; | |
500 | ||
501 | tempnode = stack->top; | |
502 | tempentry = tempnode->entry; | |
503 | stack->top = stack->top->chain; | |
504 | free (tempnode); | |
505 | ||
506 | return tempentry; | |
507 | } | |
508 | ||
509 | /* Enqueue an ENTRY onto the given QUEUE. */ | |
510 | ||
511 | static void | |
512 | enqueue_eh_entry (queue, entry) | |
513 | struct eh_queue *queue; | |
514 | struct eh_entry *entry; | |
515 | { | |
516 | struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node)); | |
517 | ||
518 | node->entry = entry; | |
519 | node->chain = NULL; | |
520 | ||
521 | if (queue->head == NULL) | |
522 | { | |
523 | queue->head = node; | |
524 | } | |
525 | else | |
526 | { | |
527 | queue->tail->chain = node; | |
528 | } | |
529 | queue->tail = node; | |
530 | } | |
531 | ||
532 | /* Dequeue an entry from the given QUEUE. */ | |
533 | ||
534 | static struct eh_entry * | |
535 | dequeue_eh_entry (queue) | |
536 | struct eh_queue *queue; | |
537 | { | |
538 | struct eh_node *tempnode; | |
539 | struct eh_entry *tempentry; | |
540 | ||
541 | if (queue->head == NULL) | |
542 | return NULL; | |
543 | ||
544 | tempnode = queue->head; | |
545 | queue->head = queue->head->chain; | |
546 | ||
547 | tempentry = tempnode->entry; | |
548 | free (tempnode); | |
549 | ||
550 | return tempentry; | |
551 | } | |
552 | \f | |
553 | /* Routine to see if exception exception handling is turned on. | |
554 | DO_WARN is non-zero if we want to inform the user that exception | |
12670d88 RK |
555 | handling is turned off. |
556 | ||
557 | This is used to ensure that -fexceptions has been specified if the | |
558 | compiler tries to use any exception-specific functions. */ | |
4956d07c MS |
559 | |
560 | int | |
561 | doing_eh (do_warn) | |
562 | int do_warn; | |
563 | { | |
564 | if (! flag_exceptions) | |
565 | { | |
566 | static int warned = 0; | |
567 | if (! warned && do_warn) | |
568 | { | |
569 | error ("exception handling disabled, use -fexceptions to enable"); | |
570 | warned = 1; | |
571 | } | |
572 | return 0; | |
573 | } | |
574 | return 1; | |
575 | } | |
576 | ||
12670d88 RK |
577 | /* Given a return address in ADDR, determine the address we should use |
578 | to find the corresponding EH region. */ | |
4956d07c MS |
579 | |
580 | rtx | |
581 | eh_outer_context (addr) | |
582 | rtx addr; | |
583 | { | |
584 | /* First mask out any unwanted bits. */ | |
585 | #ifdef MASK_RETURN_ADDR | |
586 | emit_insn (gen_rtx (SET, Pmode, | |
587 | addr, | |
588 | gen_rtx (AND, Pmode, | |
589 | addr, MASK_RETURN_ADDR))); | |
590 | #endif | |
591 | ||
12670d88 RK |
592 | /* Then subtract out enough to get into the appropriate region. If |
593 | this is defined, assume we don't need to subtract anything as it | |
594 | is already within the correct region. */ | |
4956d07c MS |
595 | #if ! defined (RETURN_ADDR_OFFSET) |
596 | addr = plus_constant (addr, -1); | |
597 | #endif | |
598 | ||
599 | return addr; | |
600 | } | |
601 | ||
12670d88 RK |
602 | /* Start a new exception region and push the HANDLER for the region |
603 | onto protect_list. All of the regions created with add_partial_entry | |
604 | will be ended when end_protect_partials () is invoked. */ | |
605 | ||
606 | void | |
607 | add_partial_entry (handler) | |
608 | tree handler; | |
609 | { | |
610 | expand_eh_region_start (); | |
611 | ||
612 | /* Make sure the entry is on the correct obstack. */ | |
613 | push_obstacks_nochange (); | |
614 | resume_temporary_allocation (); | |
615 | protect_list = tree_cons (NULL_TREE, handler, protect_list); | |
616 | pop_obstacks (); | |
617 | } | |
618 | ||
619 | /* Output a note marking the start of an exception handling region. | |
620 | All instructions emitted after this point are considered to be part | |
621 | of the region until expand_eh_region_end () is invoked. */ | |
4956d07c MS |
622 | |
623 | void | |
624 | expand_eh_region_start () | |
625 | { | |
626 | rtx note; | |
627 | ||
628 | /* This is the old code. */ | |
629 | if (! doing_eh (0)) | |
630 | return; | |
631 | ||
632 | #if 0 | |
633 | /* Maybe do this to prevent jumping in and so on... */ | |
634 | pushlevel (0); | |
635 | #endif | |
636 | ||
637 | note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG); | |
638 | emit_label (push_eh_entry (&ehstack)); | |
639 | NOTE_BLOCK_NUMBER (note) | |
640 | = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label); | |
641 | } | |
642 | ||
12670d88 RK |
643 | /* Output a note marking the end of the exception handling region on |
644 | the top of ehstack. | |
645 | ||
646 | HANDLER is either the cleanup for the exception region, or if we're | |
647 | marking the end of a try block, HANDLER is integer_zero_node. | |
648 | ||
649 | HANDLER will be transformed to rtl when expand_leftover_cleanups () | |
650 | is invoked. */ | |
4956d07c MS |
651 | |
652 | void | |
653 | expand_eh_region_end (handler) | |
654 | tree handler; | |
655 | { | |
656 | rtx note; | |
657 | ||
658 | struct eh_entry *entry; | |
659 | ||
660 | if (! doing_eh (0)) | |
661 | return; | |
662 | ||
663 | entry = pop_eh_entry (&ehstack); | |
664 | ||
665 | note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END); | |
666 | NOTE_BLOCK_NUMBER (note) = CODE_LABEL_NUMBER (entry->exception_handler_label); | |
667 | ||
12670d88 | 668 | /* Emit a label marking the end of this exception region. */ |
4956d07c MS |
669 | emit_label (entry->end_label); |
670 | ||
671 | /* Put in something that takes up space, as otherwise the end | |
12670d88 RK |
672 | address for this EH region could have the exact same address as |
673 | its outer region. This would cause us to miss the fact that | |
674 | resuming exception handling with this PC value would be inside | |
675 | the outer region. */ | |
4956d07c MS |
676 | emit_insn (gen_nop ()); |
677 | ||
678 | entry->finalization = handler; | |
679 | ||
680 | enqueue_eh_entry (&ehqueue, entry); | |
681 | ||
4956d07c | 682 | #if 0 |
12670d88 | 683 | /* Maybe do this to prevent jumping in and so on... */ |
4956d07c MS |
684 | poplevel (1, 0, 0); |
685 | #endif | |
686 | } | |
687 | ||
12670d88 RK |
/* Emit a call to __throw and note that we threw something, so we know
   we need to generate the necessary code for __throw.

   Before invoking throw, the __eh_pc variable must have been set up
   to contain the PC being thrown from.  This address is used by
   __throw () to determine which exception region (if any) is
   responsible for handling the exception.  */

static void
emit_throw ()
{
#ifdef JUMP_TO_THROW
  /* Targets that define JUMP_TO_THROW transfer control directly.  */
  emit_indirect_jump (throw_libfunc);
#else
  /* Mark the symbol as used, then call __throw as a library routine.  */
  SYMBOL_REF_USED (throw_libfunc) = 1;
  emit_library_call (throw_libfunc, 0, VOIDmode, 0);
#endif
  /* Record that __throw is needed so its support code gets generated.  */
  throw_used = 1;
  /* Control does not return from __throw.  */
  emit_barrier ();
}
708 | ||
12670d88 RK |
/* An internal throw with an indirect CONTEXT we want to throw from.
   CONTEXT evaluates to the context of the throw.  */

static void
expand_internal_throw_indirect (context)
     rtx context;
{
  /* Make sure the external __eh_pc variable is referenced.  */
  assemble_external (eh_saved_pc);
  /* Store the throw context into __eh_pc, then perform the throw.  */
  emit_move_insn (eh_saved_pc_rtx, context);
  emit_throw ();
}
720 | ||
12670d88 RK |
721 | /* An internal throw with a direct CONTEXT we want to throw from. |
722 | CONTEXT must be a label; its address will be used as the context of | |
723 | the throw. */ | |
4956d07c MS |
724 | |
725 | void | |
726 | expand_internal_throw (context) | |
727 | rtx context; | |
728 | { | |
729 | expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context)); | |
730 | } | |
731 | ||
732 | /* Called from expand_exception_blocks and expand_end_catch_block to | |
12670d88 | 733 | emit any pending handlers/cleanups queued from expand_eh_region_end (). */ |
4956d07c MS |
734 | |
735 | void | |
736 | expand_leftover_cleanups () | |
737 | { | |
738 | struct eh_entry *entry; | |
739 | ||
740 | while ((entry = dequeue_eh_entry (&ehqueue)) != 0) | |
741 | { | |
742 | rtx prev; | |
743 | ||
12670d88 RK |
744 | /* A leftover try block. Shouldn't be one here. */ |
745 | if (entry->finalization == integer_zero_node) | |
746 | abort (); | |
747 | ||
748 | /* Output the label for the start of the exception handler. */ | |
4956d07c MS |
749 | emit_label (entry->exception_handler_label); |
750 | ||
12670d88 | 751 | /* And now generate the insns for the handler. */ |
4956d07c MS |
752 | expand_expr (entry->finalization, const0_rtx, VOIDmode, 0); |
753 | ||
754 | prev = get_last_insn (); | |
755 | if (! (prev && GET_CODE (prev) == BARRIER)) | |
756 | { | |
757 | /* The below can be optimized away, and we could just fall into the | |
758 | next EH handler, if we are certain they are nested. */ | |
12670d88 RK |
759 | /* Emit code to throw to the outer context if we fall off |
760 | the end of the handler. */ | |
4956d07c MS |
761 | expand_internal_throw (entry->end_label); |
762 | } | |
763 | ||
4956d07c MS |
764 | free (entry); |
765 | } | |
766 | } | |
767 | ||
12670d88 RK |
/* Called at the start of a block of try statements.  */
void
expand_start_try_stmts ()
{
  /* A try block is simply a new exception region, provided EH is on.  */
  if (doing_eh (1))
    expand_eh_region_start ();
}
777 | ||
/* Generate RTL for the start of a group of catch clauses.

   It is responsible for starting a new instruction sequence for the
   instructions in the catch block, and expanding the handlers for the
   internally-generated exception regions nested within the try block
   corresponding to this catch block.  */

void
expand_start_all_catch ()
{
  struct eh_entry *entry;
  tree label;

  if (! doing_eh (1))
    return;

  /* End the try block.  */
  expand_eh_region_end (integer_zero_node);

  emit_line_note (input_filename, lineno);
  label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

  /* The label for the exception handling block that we will save.
     This is Lresume in the documentation.  */
  expand_label (label);

  /* Put in something that takes up space, as otherwise the end
     address for the EH region could have the exact same address as
     the outer region, causing us to miss the fact that resuming
     exception handling with this PC value would be inside the outer
     region.  */
  emit_insn (gen_nop ());

  /* Push the label that points to where normal flow is resumed onto
     the top of the label stack.  */
  push_label_entry (&caught_return_label_stack, NULL_RTX, label);

  /* Start a new sequence for all the catch blocks.  We will add this
     to the global sequence catch_clauses when we have completed all
     the handlers in this handler-seq.  */
  start_sequence ();

  /* Drain the queue of regions opened within the try block, emitting
     each queued handler, until the try block's own entry is reached.  */
  while (1)
    {
      rtx prev;

      entry = dequeue_eh_entry (&ehqueue);
      /* Emit the label for the exception handler for this region, and
	 expand the code for the handler.

	 Note that a catch region is handled as a side-effect here;
	 for a try block, entry->finalization will contain
	 integer_zero_node, so no code will be generated in the
	 expand_expr call below.  But, the label for the handler will
	 still be emitted, so any code emitted after this point will
	 end up being the handler.  */
      emit_label (entry->exception_handler_label);
      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* When we get down to the matching entry for this try block, stop.  */
      if (entry->finalization == integer_zero_node)
	{
	  /* Don't forget to free this entry.  */
	  free (entry);
	  break;
	}

      prev = get_last_insn ();
      if (prev == NULL || GET_CODE (prev) != BARRIER)
	{
	  /* Code to throw out to outer context when we fall off end
	     of the handler.  We can't do this here for catch blocks,
	     so it's done in expand_end_all_catch () instead.

	     The below can be optimized away (and we could just fall
	     into the next EH handler) if we are certain they are
	     nested.  */

	  expand_internal_throw (entry->end_label);
	}
      free (entry);
    }
}
861 | ||
12670d88 RK |
/* Finish up the catch block.  At this point all the insns for the
   catch clauses have already been generated, so we only have to add
   them to the catch_clauses list.  We also want to make sure that if
   we fall off the end of the catch clauses that we rethrow to the
   outer EH region.  */

void
expand_end_all_catch ()
{
  rtx new_catch_clause;

  if (! doing_eh (1))
    return;

  /* Code to throw out to outer context, if we fall off end of catch
     handlers.  This is rethrow (Lresume, same id, same obj) in the
     documentation.  We use Lresume because we know that it will throw
     to the correct context.

     In other words, if the catch handler doesn't exit or return, we
     do a "throw" (using the address of Lresume as the point being
     thrown from) so that the outer EH region can then try to process
     the exception.  */

  expand_internal_throw (DECL_RTL (top_label_entry (&caught_return_label_stack)));

  /* Now we have the complete catch sequence.  */
  new_catch_clause = get_insns ();
  end_sequence ();

  /* This level of catch blocks is done, so set up the successful
     catch jump label for the next layer of catch blocks.  */
  pop_label_entry (&caught_return_label_stack);

  /* Add the new sequence of catches to the main one for this function.  */
  push_to_sequence (catch_clauses);
  emit_insns (new_catch_clause);
  catch_clauses = get_insns ();
  end_sequence ();

  /* Here we fall through into the continuation code.  */
}
904 | ||
12670d88 RK |
905 | /* End all the pending exception regions on protect_list. The handlers |
906 | will be emitted when expand_leftover_cleanups () is invoked. */ | |
4956d07c MS |
907 | |
908 | void | |
909 | end_protect_partials () | |
910 | { | |
911 | while (protect_list) | |
912 | { | |
913 | expand_eh_region_end (TREE_VALUE (protect_list)); | |
914 | protect_list = TREE_CHAIN (protect_list); | |
915 | } | |
916 | } | |
917 | \f | |
/* The exception table that we build that is used for looking up and
   dispatching exceptions, the current number of entries, and its
   maximum size before we have to extend it.

   The number in eh_table is the code label number of the exception
   handler for the region.  This is added by add_eh_table_entry () and
   used by output_exception_table_entry ().  */

static int *eh_table;		/* Growable array of handler label numbers.  */
static int eh_table_size;	/* Number of entries currently in use.  */
static int eh_table_max_size;	/* Allocated capacity of eh_table.  */
929 | ||
930 | /* Note the need for an exception table entry for region N. If we | |
12670d88 RK |
931 | don't need to output an explicit exception table, avoid all of the |
932 | extra work. | |
933 | ||
934 | Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen. | |
935 | N is the NOTE_BLOCK_NUMBER of the note, which comes from the code | |
936 | label number of the exception handler for the region. */ | |
4956d07c MS |
937 | |
938 | void | |
939 | add_eh_table_entry (n) | |
940 | int n; | |
941 | { | |
942 | #ifndef OMIT_EH_TABLE | |
943 | if (eh_table_size >= eh_table_max_size) | |
944 | { | |
945 | if (eh_table) | |
946 | { | |
947 | eh_table_max_size += eh_table_max_size>>1; | |
948 | ||
949 | if (eh_table_max_size < 0) | |
950 | abort (); | |
951 | ||
abf3bf38 JW |
952 | if ((eh_table = (int *) realloc (eh_table, |
953 | eh_table_max_size * sizeof (int))) | |
4956d07c MS |
954 | == 0) |
955 | fatal ("virtual memory exhausted"); | |
956 | } | |
957 | else | |
958 | { | |
959 | eh_table_max_size = 252; | |
960 | eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int)); | |
961 | } | |
962 | } | |
963 | eh_table[eh_table_size++] = n; | |
964 | #endif | |
965 | } | |
966 | ||
12670d88 RK |
967 | /* Return a non-zero value if we need to output an exception table. |
968 | ||
969 | On some platforms, we don't have to output a table explicitly. | |
970 | This routine doesn't mean we don't have one. */ | |
4956d07c MS |
971 | |
972 | int | |
973 | exception_table_p () | |
974 | { | |
975 | if (eh_table) | |
976 | return 1; | |
977 | ||
978 | return 0; | |
979 | } | |
980 | ||
12670d88 RK |
981 | /* Output the entry of the exception table corresponding to to the |
982 | exception region numbered N to file FILE. | |
983 | ||
984 | N is the code label number corresponding to the handler of the | |
985 | region. */ | |
4956d07c MS |
986 | |
987 | static void | |
988 | output_exception_table_entry (file, n) | |
989 | FILE *file; | |
990 | int n; | |
991 | { | |
992 | char buf[256]; | |
993 | rtx sym; | |
994 | ||
995 | ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n); | |
996 | sym = gen_rtx (SYMBOL_REF, Pmode, buf); | |
997 | assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1); | |
998 | ||
999 | ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n); | |
1000 | sym = gen_rtx (SYMBOL_REF, Pmode, buf); | |
1001 | assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1002 | ||
1003 | ASM_GENERATE_INTERNAL_LABEL (buf, "L", n); | |
1004 | sym = gen_rtx (SYMBOL_REF, Pmode, buf); | |
1005 | assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1006 | ||
1007 | putc ('\n', file); /* blank line */ | |
1008 | } | |
1009 | ||
12670d88 | 1010 | /* Output the exception table if we have and need one. */ |
4956d07c MS |
1011 | |
1012 | void | |
1013 | output_exception_table () | |
1014 | { | |
1015 | int i; | |
1016 | extern FILE *asm_out_file; | |
1017 | ||
1018 | if (! doing_eh (0)) | |
1019 | return; | |
1020 | ||
1021 | exception_section (); | |
1022 | ||
1023 | /* Beginning marker for table. */ | |
1024 | assemble_align (GET_MODE_ALIGNMENT (ptr_mode)); | |
1025 | assemble_label ("__EXCEPTION_TABLE__"); | |
1026 | ||
1027 | assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1028 | assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1029 | assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1030 | putc ('\n', asm_out_file); /* blank line */ | |
1031 | ||
1032 | for (i = 0; i < eh_table_size; ++i) | |
1033 | output_exception_table_entry (asm_out_file, eh_table[i]); | |
1034 | ||
1035 | free (eh_table); | |
1036 | ||
1037 | /* Ending marker for table. */ | |
1038 | assemble_label ("__EXCEPTION_END__"); | |
1039 | assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1040 | assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1041 | assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
1042 | putc ('\n', asm_out_file); /* blank line */ | |
1043 | } | |
1044 | ||
1045 | /* Generate code to initialize the exception table at program startup | |
1046 | time. */ | |
1047 | ||
1048 | void | |
1049 | register_exception_table () | |
1050 | { | |
1051 | emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0, | |
1052 | VOIDmode, 1, | |
1053 | gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"), | |
1054 | Pmode); | |
1055 | } | |
1056 | \f | |
12670d88 RK |
/* Emit the RTL for the start of the per-function unwinder for the
   current function.  See emit_unwinder () for further information.

   DOESNT_NEED_UNWINDER is a target-specific macro that determines if
   the current function actually needs a per-function unwinder or not.
   By default, all functions need one.  */

void
start_eh_unwinder ()
{
#ifdef DOESNT_NEED_UNWINDER
  if (DOESNT_NEED_UNWINDER)
    return;
#endif

  /* Open the exception region that encloses the whole function body;
     end_eh_unwinder () closes it.  */
  expand_eh_region_start ();
}
1074 | ||
12670d88 | 1075 | /* Emit insns for the end of the per-function unwinder for the |
4956d07c MS |
1076 | current function. */ |
1077 | ||
1078 | void | |
1079 | end_eh_unwinder () | |
1080 | { | |
1081 | tree expr; | |
1082 | rtx return_val_rtx, ret_val, label, end, insns; | |
1083 | ||
1084 | if (! doing_eh (0)) | |
1085 | return; | |
1086 | ||
1087 | #ifdef DOESNT_NEED_UNWINDER | |
1088 | if (DOESNT_NEED_UNWINDER) | |
1089 | return; | |
1090 | #endif | |
1091 | ||
843e8335 MS |
1092 | assemble_external (eh_saved_pc); |
1093 | ||
4956d07c MS |
1094 | expr = make_node (RTL_EXPR); |
1095 | TREE_TYPE (expr) = void_type_node; | |
1096 | RTL_EXPR_RTL (expr) = const0_rtx; | |
1097 | TREE_SIDE_EFFECTS (expr) = 1; | |
1098 | start_sequence_for_rtl_expr (expr); | |
1099 | ||
12670d88 RK |
1100 | /* ret_val will contain the address of the code where the call |
1101 | to the current function occurred. */ | |
4956d07c MS |
1102 | ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS, |
1103 | 0, hard_frame_pointer_rtx); | |
1104 | return_val_rtx = copy_to_reg (ret_val); | |
1105 | ||
12670d88 RK |
1106 | /* Get the address we need to use to determine what exception |
1107 | handler should be invoked, and store it in __eh_pc. */ | |
4956d07c | 1108 | return_val_rtx = eh_outer_context (return_val_rtx); |
4956d07c MS |
1109 | emit_move_insn (eh_saved_pc_rtx, return_val_rtx); |
1110 | ||
12670d88 RK |
1111 | /* Either set things up so we do a return directly to __throw, or |
1112 | we return here instead. */ | |
4956d07c MS |
1113 | #ifdef JUMP_TO_THROW |
1114 | emit_move_insn (ret_val, throw_libfunc); | |
1115 | #else | |
1116 | label = gen_label_rtx (); | |
1117 | emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label)); | |
1118 | #endif | |
1119 | ||
1120 | #ifdef RETURN_ADDR_OFFSET | |
1121 | return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET); | |
1122 | if (return_val_rtx != ret_val) | |
1123 | emit_move_insn (ret_val, return_val_rtx); | |
1124 | #endif | |
1125 | ||
1126 | end = gen_label_rtx (); | |
1127 | emit_jump (end); | |
1128 | ||
1129 | RTL_EXPR_SEQUENCE (expr) = get_insns (); | |
1130 | end_sequence (); | |
1131 | expand_eh_region_end (expr); | |
1132 | ||
1133 | emit_jump (end); | |
1134 | ||
1135 | #ifndef JUMP_TO_THROW | |
1136 | emit_label (label); | |
1137 | emit_throw (); | |
1138 | #endif | |
1139 | ||
1140 | expand_leftover_cleanups (); | |
1141 | ||
1142 | emit_label (end); | |
1143 | } | |
1144 | ||
12670d88 RK |
/* If necessary, emit insns for the per function unwinder for the
   current function.  Called after all the code that needs unwind
   protection is output.

   The unwinder takes care of catching any exceptions that have not
   been previously caught within the function, unwinding the stack to
   the next frame, and rethrowing using the address of the current
   function's caller as the context of the throw.

   On some platforms __throw can do this by itself (or with the help
   of __unwind_function) so the per-function unwinder is
   unnecessary.

   We cannot place the unwinder into the function until after we know
   we are done inlining, as we don't want to have more than one
   unwinder per non-inlined function.  */

void
emit_unwinder ()
{
  rtx insns, insn;

  start_sequence ();
  start_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* We place the start of the exception region associated with the
     per function unwinder at the top of the function.  */
  if (insns)
    emit_insns_after (insns, get_insns ());

  start_sequence ();
  end_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* And we place the end of the exception region before the USE and
     CLOBBER insns that may come at the end of the function.  */
  if (insns == 0)
    return;

  /* Walk backwards over trailing NOTEs and USE/CLOBBER insns to find
     the insertion point.  */
  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  if (GET_CODE (insn) == CODE_LABEL
      && GET_CODE (PREV_INSN (insn)) == BARRIER)
    {
      /* Control already cannot fall through here (a barrier precedes
	 the label), so insert right after the barrier.  */
      insn = PREV_INSN (insn);
    }
  else
    {
      /* Control can reach this point; emit a jump and barrier so the
	 normal path skips over the unwinder insns inserted below.  */
      rtx label = gen_label_rtx ();
      emit_label_after (label, insn);
      insn = emit_jump_insn_after (gen_jump (label), insn);
      insn = emit_barrier_after (insn);
    }

  emit_insns_after (insns, insn);
}
1209 | ||
12670d88 RK |
1210 | /* Scan the current insns and build a list of handler labels. The |
1211 | resulting list is placed in the global variable exception_handler_labels. | |
1212 | ||
1213 | It is called after the last exception handling region is added to | |
1214 | the current function (when the rtl is almost all built for the | |
1215 | current function) and before the jump optimization pass. */ | |
4956d07c MS |
1216 | |
1217 | void | |
1218 | find_exception_handler_labels () | |
1219 | { | |
1220 | rtx insn; | |
1221 | int max_labelno = max_label_num (); | |
1222 | int min_labelno = get_first_label_num (); | |
1223 | rtx *labels; | |
1224 | ||
1225 | exception_handler_labels = NULL_RTX; | |
1226 | ||
1227 | /* If we aren't doing exception handling, there isn't much to check. */ | |
1228 | if (! doing_eh (0)) | |
1229 | return; | |
1230 | ||
12670d88 | 1231 | /* Generate a handy reference to each label. */ |
4956d07c MS |
1232 | |
1233 | labels = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx)); | |
12670d88 RK |
1234 | |
1235 | /* Eeeeeeew. */ | |
4956d07c MS |
1236 | labels -= min_labelno; |
1237 | ||
1238 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
1239 | { | |
1240 | if (GET_CODE (insn) == CODE_LABEL) | |
1241 | if (CODE_LABEL_NUMBER (insn) >= min_labelno | |
1242 | && CODE_LABEL_NUMBER (insn) < max_labelno) | |
1243 | labels[CODE_LABEL_NUMBER (insn)] = insn; | |
1244 | } | |
1245 | ||
12670d88 RK |
1246 | /* For each start of a region, add its label to the list. */ |
1247 | ||
4956d07c MS |
1248 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
1249 | { | |
1250 | if (GET_CODE (insn) == NOTE | |
1251 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) | |
1252 | { | |
1253 | rtx label = NULL_RTX; | |
1254 | ||
1255 | if (NOTE_BLOCK_NUMBER (insn) >= min_labelno | |
1256 | && NOTE_BLOCK_NUMBER (insn) < max_labelno) | |
1257 | { | |
1258 | label = labels[NOTE_BLOCK_NUMBER (insn)]; | |
1259 | ||
1260 | if (label) | |
1261 | exception_handler_labels | |
1262 | = gen_rtx (EXPR_LIST, VOIDmode, | |
1263 | label, exception_handler_labels); | |
1264 | else | |
1265 | warning ("didn't find handler for EH region %d", | |
1266 | NOTE_BLOCK_NUMBER (insn)); | |
1267 | } | |
1268 | else | |
1269 | warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn)); | |
1270 | } | |
1271 | } | |
1272 | } | |
1273 | ||
12670d88 RK |
/* Perform sanity checking on the exception_handler_labels list.

   Can be called after find_exception_handler_labels is called to
   build the list of exception handlers for the current function and
   before we finish processing the current function.  */

void
check_exception_handler_labels ()
{
  rtx insn, handler;

  /* If we aren't doing exception handling, there isn't much to check.  */
  if (! doing_eh (0))
    return;

  /* Ensure that the CODE_LABEL_NUMBER for the CODE_LABEL entry point
     in each handler corresponds to the CODE_LABEL_NUMBER of the
     handler.  */

  for (handler = exception_handler_labels;
       handler;
       handler = XEXP (handler, 1))
    {
      /* Search the insn stream for a CODE_LABEL with this number.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    {
	      if (CODE_LABEL_NUMBER (insn)
		  == CODE_LABEL_NUMBER (XEXP (handler, 0)))
		{
		  /* Same number but a different label rtx is an
		     inconsistency worth reporting.  */
		  if (insn != XEXP (handler, 0))
		    warning ("mismatched handler %d",
			     CODE_LABEL_NUMBER (insn));
		  break;
		}
	    }
	}
      if (insn == NULL_RTX)
	warning ("handler not found %d",
		 CODE_LABEL_NUMBER (XEXP (handler, 0)));
    }

  /* Now go through and make sure that for each region there is a
     corresponding label.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG ||
	      NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	{
	  /* Look for a handler whose label number matches the note.  */
	  for (handler = exception_handler_labels;
	       handler;
	       handler = XEXP (handler, 1))
	    {
	      if (CODE_LABEL_NUMBER (XEXP (handler, 0))
		  == NOTE_BLOCK_NUMBER (insn))
		break;
	    }
	  if (handler == NULL_RTX)
	    warning ("region exists, no handler %d",
		     NOTE_BLOCK_NUMBER (insn));
	}
    }
}
1338 | \f | |
/* This group of functions initializes the exception handling data
   structures at the start of the compilation, initializes the data
   structures at the start of a function, and saves and restores the
   exception handling data structures for the start/end of a nested
   function.  */

/* Toplevel initialization for EH things.  */

void
init_eh ()
{
  /* Generate rtl to reference the variable in which the PC of the
     current context is saved.  */
  tree type = build_pointer_type (make_node (VOID_TYPE));

  /* __eh_pc is a public external `void *' variable; build its decl
     and cache the rtl used to reference it.  */
  eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
  DECL_EXTERNAL (eh_saved_pc) = 1;
  TREE_PUBLIC (eh_saved_pc) = 1;
  make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
  eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
}
1360 | ||
12670d88 | 1361 | /* Initialize the per-function EH information. */ |
4956d07c MS |
1362 | |
1363 | void | |
1364 | init_eh_for_function () | |
1365 | { | |
1366 | ehstack.top = 0; | |
1367 | ehqueue.head = ehqueue.tail = 0; | |
1368 | catch_clauses = NULL_RTX; | |
1369 | false_label_stack = 0; | |
1370 | caught_return_label_stack = 0; | |
1371 | protect_list = NULL_TREE; | |
1372 | } | |
1373 | ||
12670d88 RK |
1374 | /* Save some of the per-function EH info into the save area denoted by |
1375 | P. | |
1376 | ||
1377 | This is currently called from save_stmt_status (). */ | |
4956d07c MS |
1378 | |
1379 | void | |
1380 | save_eh_status (p) | |
1381 | struct function *p; | |
1382 | { | |
12670d88 RK |
1383 | assert (p != NULL); |
1384 | ||
4956d07c MS |
1385 | p->ehstack = ehstack; |
1386 | p->ehqueue = ehqueue; | |
1387 | p->catch_clauses = catch_clauses; | |
1388 | p->false_label_stack = false_label_stack; | |
1389 | p->caught_return_label_stack = caught_return_label_stack; | |
1390 | p->protect_list = protect_list; | |
1391 | ||
1392 | init_eh (); | |
1393 | } | |
1394 | ||
12670d88 RK |
1395 | /* Restore the per-function EH info saved into the area denoted by P. |
1396 | ||
1397 | This is currently called from restore_stmt_status. */ | |
4956d07c MS |
1398 | |
1399 | void | |
1400 | restore_eh_status (p) | |
1401 | struct function *p; | |
1402 | { | |
12670d88 RK |
1403 | assert (p != NULL); |
1404 | ||
4956d07c MS |
1405 | protect_list = p->protect_list; |
1406 | caught_return_label_stack = p->caught_return_label_stack; | |
1407 | false_label_stack = p->false_label_stack; | |
1408 | catch_clauses = p->catch_clauses; | |
1409 | ehqueue = p->ehqueue; | |
1410 | ehstack = p->ehstack; | |
1411 | } | |
1412 | \f | |
1413 | /* This section is for the exception handling specific optimization | |
1414 | pass. First are the internal routines, and then the main | |
1415 | optimization pass. */ | |
1416 | ||
1417 | /* Determine if the given INSN can throw an exception. */ | |
1418 | ||
1419 | static int | |
1420 | can_throw (insn) | |
1421 | rtx insn; | |
1422 | { | |
12670d88 | 1423 | /* Calls can always potentially throw exceptions. */ |
4956d07c MS |
1424 | if (GET_CODE (insn) == CALL_INSN) |
1425 | return 1; | |
1426 | ||
1427 | #ifdef ASYNCH_EXCEPTIONS | |
1428 | /* If we wanted asynchronous exceptions, then everything but NOTEs | |
12670d88 | 1429 | and CODE_LABELs could throw. */ |
4956d07c MS |
1430 | if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL) |
1431 | return 1; | |
1432 | #endif | |
1433 | ||
1434 | return 0; | |
1435 | } | |
1436 | ||
12670d88 RK |
/* Scan an exception region looking for the matching end and then
   remove it if possible.  INSN is the start of the region, N is the
   region number, and DELETE_OUTER is to note if anything in this
   region can throw.

   Regions are removed if they cannot possibly catch an exception.
   This is determined by invoking can_throw () on each insn within the
   region; if can_throw returns true for any of the instructions, the
   region can catch an exception, since there is an insn within the
   region that is capable of throwing an exception.

   Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
   calls abort () if it can't find one.

   Can abort if INSN is not a NOTE_INSN_EH_REGION_BEGIN, or if N doesn't
   correspond to the region number, or if DELETE_OUTER is NULL.  */

static rtx
scan_region (insn, n, delete_outer)
     rtx insn;
     int n;
     int *delete_outer;
{
  rtx start = insn;

  /* Assume we can delete the region.  */
  int delete = 1;

  assert (insn != NULL_RTX
	  && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
	  && NOTE_BLOCK_NUMBER (insn) == n
	  && delete_outer != NULL);

  insn = NEXT_INSN (insn);

  /* Look for the matching end.  */
  while (! (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
    {
      /* If anything can throw, we can't remove the region.  */
      if (delete && can_throw (insn))
	{
	  delete = 0;
	}

      /* Watch out for and handle nested regions.  */
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	{
	  /* Recurse; the callee advances insn to the nested region's
	     end note, so the nested body is scanned exactly once.  */
	  insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
	}

      insn = NEXT_INSN (insn);
    }

  /* The _BEG/_END NOTEs must match and nest.  */
  if (NOTE_BLOCK_NUMBER (insn) != n)
    abort ();

  /* If anything in this exception region can throw, we can throw.  */
  if (! delete)
    *delete_outer = 0;
  else
    {
      /* Delete the start and end of the region.  */
      delete_insn (start);
      delete_insn (insn);

      /* Only do this part if we have built the exception handler
	 labels.  */
      if (exception_handler_labels)
	{
	  rtx x, *prev = &exception_handler_labels;

	  /* Find it in the list of handlers.  */
	  for (x = exception_handler_labels; x; x = XEXP (x, 1))
	    {
	      rtx label = XEXP (x, 0);
	      if (CODE_LABEL_NUMBER (label) == n)
		{
		  /* If we are the last reference to the handler,
		     delete it.  */
		  if (--LABEL_NUSES (label) == 0)
		    delete_insn (label);

		  if (optimize)
		    {
		      /* Remove it from the list of exception handler
			 labels, if we are optimizing.  If we are not, then
			 leave it in the list, as we are not really going to
			 remove the region.  */
		      *prev = XEXP (x, 1);
		      XEXP (x, 1) = 0;
		      XEXP (x, 0) = 0;
		    }

		  break;
		}
	      prev = &XEXP (x, 1);
	    }
	}
    }
  return insn;
}
1542 | ||
1543 | /* Perform various interesting optimizations for exception handling | |
1544 | code. | |
1545 | ||
12670d88 RK |
1546 | We look for empty exception regions and make them go (away). The |
1547 | jump optimization code will remove the handler if nothing else uses | |
1548 | it. */ | |
4956d07c MS |
1549 | |
1550 | void | |
1551 | exception_optimize () | |
1552 | { | |
1553 | rtx insn, regions = NULL_RTX; | |
1554 | int n; | |
1555 | ||
12670d88 | 1556 | /* Remove empty regions. */ |
4956d07c MS |
1557 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
1558 | { | |
1559 | if (GET_CODE (insn) == NOTE | |
1560 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) | |
1561 | { | |
12670d88 RK |
1562 | /* Since scan_region () will return the NOTE_INSN_EH_REGION_END |
1563 | insn, we will indirectly skip through all the insns | |
1564 | inbetween. We are also guaranteed that the value of insn | |
1565 | returned will be valid, as otherwise scan_region () won't | |
1566 | return. */ | |
4956d07c MS |
1567 | insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n); |
1568 | } | |
1569 | } | |
1570 | } |