]>
Commit | Line | Data |
---|---|---|
4956d07c MS |
/* Implements exception handling.
2 | Copyright (C) 1989, 92-95, 1996 Free Software Foundation, Inc. | |
3 | Contributed by Mike Stump <mrs@cygnus.com>. | |
4 | ||
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | ||
23 | /* This file contains the exception handling code for the compiler. | |
24 | ||
25 | Exception handling is a mechanism by which complex flows of control | |
26 | can be designated. The central concepts are the `exception region', | |
27 | the associated `exception handler' for that region and the concept of | |
28 | throwing an exception, and the context of the throw. | |
29 | ||
30 | Restrictions are, the regions must be non-overlapping, they can be | |
31 | nested, and there can be zero or more per function. For each | |
32 | region, there is one associated handler. Regions only ever | |
33 | surround possible context points of throws. Regions with no such | |
34 | context points can be optimized away, as they are trivial, and it | |
35 | is not possible for the associated handler to ever be used during a | |
36 | throw. | |
37 | ||
38 | Semantics are, when an exception is thrown, control is transferred | |
39 | to a handler, and the code of the exception handler is executed. | |
40 | As control is transferred, the machine state (stack pointer, all | |
41 | callee saved registers and possibly the frame pointer) is restored. | |
42 | ||
43 | The handler that is selected by a throw, is the handler associated | |
44 | with the smallest (most nested) region that contains the context of | |
45 | the throw, if such a region exists. If no region exists, the | |
46 | search for a handler continues in the function that called the | |
47 | function that contains the current context of the throw, with the | |
48 | context of the throw then becoming that point in the code that | |
49 | contains the call instruction. | |
50 | ||
51 | ||
52 | One can add to the basic model the concepts of thrown exception | |
53 | type, and thrown exception value. Semantics are as above, except a | |
54 | further check is done when finding a handler for the thrown | |
55 | exception to see if the given handler can handle the thrown | |
56 | exception based upon the exception object's type and possibly its | |
57 | value. A common optimization is when two regions are identical, | |
58 | the handlers are combined into just one handler so the first check | |
59 | of the resulting handler is for the inner (nested) region's | |
60 | handler, and the second one is for the outer region's handler. To | |
61 | separate these two notions of handlers, we can call the subhandlers | |
62 | `catch blocks', and use the name `handler' to refer to the | |
63 | combination of the two. Currently, this layer of functionality is | |
64 | managed by the various front ends. | |
65 | ||
66 | ||
67 | To mark the start of a exception handling region, | |
68 | expand_eh_region_start () is called. To mark the end, and | |
69 | associate a handler for the region expand_eh_region_end () is used. | |
70 | The front end can use this interface, if useful. The back end | |
71 | creates exception regions with these routines. Another interface | |
72 | the front end can use, is TARGET_EXPR. TARGET_EXPR gives an | |
73 | unwind-protect style interface a la emacs. | |
74 | ||
75 | ||
76 | In this implementation, regions do not span more than one function. | |
77 | ||
78 | In order to help with the task of finding the associated handler for | |
79 | a region, an exception table is built which associates handlers | |
80 | with regions. A 3-tuple, containing a reference to the start, the | |
81 | end and the handler is sufficient for the exception table. | |
82 | ||
83 | In order to help with the task of restoring callee saved registers | |
84 | and performing other associated function exit actions, function | |
85 | `unwinders' can be generated within those function for which a | |
86 | generic function unwinder called __unwind_function () cannot work. | |
87 | Whether the generic __unwind_function can work is machine dependent | |
   and possibly function dependent.  The macro DOESNT_NEED_UNWINDER
89 | decides if the current function being compiled needs an unwinder or | |
90 | not. | |
91 | ||
92 | The default is for unwinders to be used, as the default generic | |
93 | function unwinder only calls abort (). The compiler-generated per | |
94 | function function unwinders simply modify the context of thrown | |
95 | exception to be that of the call site, and then arrange for control | |
96 | to be transferred to __throw instead of the function's caller on | |
97 | return, and then return. */ | |
98 | ||
99 | ||
100 | #include "config.h" | |
101 | #include <stdio.h> | |
102 | #include "rtl.h" | |
103 | #include "tree.h" | |
104 | #include "flags.h" | |
105 | #include "except.h" | |
106 | #include "function.h" | |
107 | #include "insn-flags.h" | |
108 | #include "expr.h" | |
109 | #include "insn-codes.h" | |
110 | #include "regs.h" | |
111 | #include "hard-reg-set.h" | |
112 | #include "insn-config.h" | |
113 | #include "recog.h" | |
114 | #include "output.h" | |
115 | ||
/* List of labels used for exception handlers.  Created by
   find_exception_handler_labels for the optimization passes.  */
118 | ||
119 | rtx exception_handler_labels; | |
120 | ||
121 | /* Nonzero means that throw was used. Used for now, because __throw | |
122 | is emitted statically in each file. */ | |
123 | ||
124 | int throw_used; | |
125 | ||
/* A stack used for keeping track of the currently active exception
   handling region.  As exception regions are started, an entry
   describing the region is pushed onto this stack.  The current
   region can be found by looking at the top of the stack, and as we
   end regions, entries are popped.  */
131 | ||
132 | struct eh_stack ehstack; | |
133 | ||
/* A queue used for tracking which exception regions have closed, but
   whose handlers have not yet been expanded.  As we end regions, we
   enqueue the entry onto this queue.  Entries are dequeued from the
   queue during expand_leftover_cleanups and expand_start_all_catch,
   and the handlers for regions are expanded in groups in an effort to
   group all the handlers together in the same region of program space
   to improve page performance.  We should redo things, so that we
   either take RTL for the handler, or we expand the handler expressed
   as a tree immediately at region end time.  */
143 | ||
144 | struct eh_queue ehqueue; | |
145 | ||
146 | /* Insns for the catch clauses. */ | |
147 | ||
148 | rtx catch_clauses; | |
149 | ||
150 | /* A list of actions for handlers for regions that are not yet | |
151 | closed. */ | |
152 | ||
153 | tree protect_list; | |
154 | ||
155 | /* Stacks to keep track of various labels. */ | |
156 | ||
157 | /* Keeps track of the label to resume to, should one want to resume | |
158 | the normal control flow out of a handler. Also used to rethrow | |
159 | exceptions caught in handlers, as if they were physically emitted | |
160 | inline. */ | |
161 | ||
162 | struct label_node *caught_return_label_stack = NULL; | |
163 | ||
164 | /* A spare data area for the front end's own use. */ | |
165 | ||
166 | struct label_node *false_label_stack = NULL; | |
167 | ||
843e8335 | 168 | /* The rtx and the tree for the saved PC value. */ |
4956d07c MS |
169 | |
170 | rtx eh_saved_pc_rtx; | |
843e8335 | 171 | tree eh_saved_pc; |
4956d07c MS |
172 | |
173 | rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx)); | |
174 | \f | |
175 | /* Various support routines to manipulate the various data structures | |
176 | used by the exception handling code. */ | |
177 | ||
178 | /* Push a label entry onto the given STACK. */ | |
179 | ||
180 | void | |
181 | push_label_entry (stack, rlabel, tlabel) | |
182 | struct label_node **stack; | |
183 | rtx rlabel; | |
184 | tree tlabel; | |
185 | { | |
186 | struct label_node *newnode | |
187 | = (struct label_node *) xmalloc (sizeof (struct label_node)); | |
188 | ||
189 | if (rlabel) | |
190 | newnode->u.rlabel = rlabel; | |
191 | else | |
192 | newnode->u.tlabel = tlabel; | |
193 | newnode->chain = *stack; | |
194 | *stack = newnode; | |
195 | } | |
196 | ||
197 | /* Pop a label entry from the given STACK. */ | |
198 | ||
199 | rtx | |
200 | pop_label_entry (stack) | |
201 | struct label_node **stack; | |
202 | { | |
203 | rtx label; | |
204 | struct label_node *tempnode; | |
205 | ||
206 | if (! *stack) | |
207 | return NULL_RTX; | |
208 | ||
209 | tempnode = *stack; | |
210 | label = tempnode->u.rlabel; | |
211 | *stack = (*stack)->chain; | |
212 | free (tempnode); | |
213 | ||
214 | return label; | |
215 | } | |
216 | ||
217 | /* Return the top element of the given STACK. */ | |
218 | ||
219 | tree | |
220 | top_label_entry (stack) | |
221 | struct label_node **stack; | |
222 | { | |
223 | if (! *stack) | |
224 | return NULL_TREE; | |
225 | ||
226 | return (*stack)->u.tlabel; | |
227 | } | |
228 | ||
229 | /* Copy an entry. */ | |
230 | ||
231 | static struct eh_entry * | |
232 | copy_eh_entry (entry) | |
233 | struct eh_entry *entry; | |
234 | { | |
235 | struct eh_entry *newentry; | |
236 | ||
237 | newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry)); | |
238 | bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry)); | |
239 | ||
240 | return newentry; | |
241 | } | |
242 | ||
243 | /* Push an entry onto the given STACK. */ | |
244 | ||
245 | static rtx | |
246 | push_eh_entry (stack) | |
247 | struct eh_stack *stack; | |
248 | { | |
249 | struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node)); | |
250 | struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry)); | |
251 | ||
252 | entry->start_label = gen_label_rtx (); | |
253 | entry->end_label = gen_label_rtx (); | |
254 | entry->exception_handler_label = gen_label_rtx (); | |
255 | entry->finalization = NULL_TREE; | |
256 | ||
257 | node->entry = entry; | |
258 | node->chain = stack->top; | |
259 | stack->top = node; | |
260 | ||
261 | return entry->start_label; | |
262 | } | |
263 | ||
264 | /* Pop an entry from the given STACK. */ | |
265 | ||
266 | static struct eh_entry * | |
267 | pop_eh_entry (stack) | |
268 | struct eh_stack *stack; | |
269 | { | |
270 | struct eh_node *tempnode; | |
271 | struct eh_entry *tempentry; | |
272 | ||
273 | tempnode = stack->top; | |
274 | tempentry = tempnode->entry; | |
275 | stack->top = stack->top->chain; | |
276 | free (tempnode); | |
277 | ||
278 | return tempentry; | |
279 | } | |
280 | ||
281 | /* Enqueue an ENTRY onto the given QUEUE. */ | |
282 | ||
283 | static void | |
284 | enqueue_eh_entry (queue, entry) | |
285 | struct eh_queue *queue; | |
286 | struct eh_entry *entry; | |
287 | { | |
288 | struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node)); | |
289 | ||
290 | node->entry = entry; | |
291 | node->chain = NULL; | |
292 | ||
293 | if (queue->head == NULL) | |
294 | { | |
295 | queue->head = node; | |
296 | } | |
297 | else | |
298 | { | |
299 | queue->tail->chain = node; | |
300 | } | |
301 | queue->tail = node; | |
302 | } | |
303 | ||
304 | /* Dequeue an entry from the given QUEUE. */ | |
305 | ||
306 | static struct eh_entry * | |
307 | dequeue_eh_entry (queue) | |
308 | struct eh_queue *queue; | |
309 | { | |
310 | struct eh_node *tempnode; | |
311 | struct eh_entry *tempentry; | |
312 | ||
313 | if (queue->head == NULL) | |
314 | return NULL; | |
315 | ||
316 | tempnode = queue->head; | |
317 | queue->head = queue->head->chain; | |
318 | ||
319 | tempentry = tempnode->entry; | |
320 | free (tempnode); | |
321 | ||
322 | return tempentry; | |
323 | } | |
324 | \f | |
325 | /* Routine to see if exception exception handling is turned on. | |
326 | DO_WARN is non-zero if we want to inform the user that exception | |
327 | handling is turned off. */ | |
328 | ||
329 | int | |
330 | doing_eh (do_warn) | |
331 | int do_warn; | |
332 | { | |
333 | if (! flag_exceptions) | |
334 | { | |
335 | static int warned = 0; | |
336 | if (! warned && do_warn) | |
337 | { | |
338 | error ("exception handling disabled, use -fexceptions to enable"); | |
339 | warned = 1; | |
340 | } | |
341 | return 0; | |
342 | } | |
343 | return 1; | |
344 | } | |
345 | ||
/* Given the return address in ADDR, compute the new pc to throw.
   This has to work for the current frame of the current function, and
   the one above it in the case of throw.  */

rtx
eh_outer_context (addr)
     rtx addr;
{
  /* First mask out any unwanted bits.  Some targets keep extra bits
     in the return address; MASK_RETURN_ADDR strips them so ADDR is a
     plain code address.  */
#ifdef MASK_RETURN_ADDR
  /* NOTE(review): the SET is built with mode Pmode, while SET rtxs
     conventionally carry VOIDmode -- confirm this is intended.  */
  emit_insn (gen_rtx (SET, Pmode,
		      addr,
		      gen_rtx (AND, Pmode,
			       addr, MASK_RETURN_ADDR)));
#endif

  /* Then subtract out enough to get into the prior region.  If this
     is defined, assume we don't need to subtract anything, as it is
     already within the region.  */
#if ! defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, -1);
#endif

  return addr;
}
371 | ||
/* Output a note marking the start of an exception handling region.
   A new entry with fresh start/end/handler labels is pushed onto
   ehstack, the region's start label is emitted, and the note is
   tagged with the handler's label number so later passes can pair
   the region with its handler.  */

void
expand_eh_region_start ()
{
  rtx note;

  /* This is the old code.  */
  if (! doing_eh (0))
    return;

#if 0
  /* Maybe do this to prevent jumping in and so on...  */
  pushlevel (0);
#endif

  note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
  /* push_eh_entry returns the new region's start label; emit it so
     the region begins immediately after the note.  */
  emit_label (push_eh_entry (&ehstack));
  /* Record the handler's label number in the note; this is how
     find_exception_handler_labels recovers the association later.  */
  NOTE_BLOCK_NUMBER (note)
    = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
}
393 | ||
/* Output a note marking the end of an exception handling region.
   HANDLER is the handler for the exception region; it is saved as
   the entry's finalization, and the closed entry is moved from
   ehstack to ehqueue so the handler can be expanded later, out of
   line (see expand_leftover_cleanups / expand_start_all_catch).  */

void
expand_eh_region_end (handler)
     tree handler;
{
  rtx note;

  struct eh_entry *entry;

  if (! doing_eh (0))
    return;

  entry = pop_eh_entry (&ehstack);

  note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
  /* Tag the end note with the same handler label number as the
     matching begin note.  */
  NOTE_BLOCK_NUMBER (note) = CODE_LABEL_NUMBER (entry->exception_handler_label);

  emit_label (entry->end_label);

  /* Put in something that takes up space, as otherwise the end
     address for the EH region could have the exact same address as
     the outer region, causing us to miss the fact that resuming
     exception handling with this PC value would be inside the outer
     region.  */
  emit_insn (gen_nop ());

  entry->finalization = handler;

  enqueue_eh_entry (&ehqueue, entry);


#if 0
  /* Maybe do this to prevent jumping in and so on...  */
  poplevel (1, 0, 0);
#endif
}
432 | ||
/* Emit a call to __throw and note that we threw something.  A
   barrier follows, since control never returns from the throw.  */

static void
emit_throw ()
{
#ifdef JUMP_TO_THROW
  /* On targets that define JUMP_TO_THROW, transfer to __throw with a
     jump rather than a call.  */
  emit_indirect_jump (throw_libfunc);
#else
  /* NOTE(review): presumably this marks __throw as referenced so the
     per-file static copy is emitted (cf. throw_used) -- confirm.  */
  SYMBOL_REF_USED (throw_libfunc) = 1;
  emit_library_call (throw_libfunc, 0, VOIDmode, 0);
#endif
  throw_used = 1;
  emit_barrier ();
}
447 | ||
/* An internal throw with an indirect CONTEXT we want to throw from.
   CONTEXT is an rtx evaluating to the PC at which the throw is
   regarded as occurring; it is stored into the global __eh_pc
   before control is transferred to __throw.  */

void
expand_internal_throw_indirect (context)
     rtx context;
{
  /* __eh_pc is defined elsewhere; make sure the assembler sees a
     declaration for it.  */
  assemble_external (eh_saved_pc);
  emit_move_insn (eh_saved_pc_rtx, context);
  emit_throw ();
}
458 | ||
/* An internal throw with a direct CONTEXT we want to throw from.  The
   context should be a label; it is wrapped in a LABEL_REF and handed
   to the indirect form above.  */

void
expand_internal_throw (context)
     rtx context;
{
  expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context));
}
468 | ||
/* Called from expand_exception_blocks and expand_end_catch_block to
   expand any pending handlers.  For each queued region the handler
   label is emitted followed by the finalization code; if control can
   fall off the end of the handler, a rethrow to the enclosing
   context is appended.  */

void
expand_leftover_cleanups ()
{
  struct eh_entry *entry;

  while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
    {
      rtx prev;

      emit_label (entry->exception_handler_label);

      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* Only add the rethrow when the handler does not already end
	 in a barrier (i.e. control can reach the end).  */
      prev = get_last_insn ();
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  /* The below can be optimized away, and we could just fall into the
	     next EH handler, if we are certain they are nested.  */
	  /* Code to throw out to outer context, if we fall off end of the
	     handler.  */
	  expand_internal_throw (entry->end_label);
	}

      /* Leftover try block, oops.  An integer_zero_node finalization
	 marks a catch-group boundary and must never be seen here.  */
      if (entry->finalization == integer_zero_node)
	abort ();

      free (entry);
    }
}
502 | ||
/* Generate RTL for the start of all the catch blocks.  Used for
   arranging for the exception handling code to be placed farther out
   of line than normal.  Emits the resume label, then drains ehqueue
   up to (and including) the entry whose finalization is
   integer_zero_node -- the marker for this catch group -- expanding
   each handler on the way.  */

void
expand_start_all_catch ()
{
  struct eh_entry *entry;
  tree label;

  if (! doing_eh (1))
    return;

  emit_line_note (input_filename, lineno);
  label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

  /* The label for the exception handling block we will save.  This is
     Lresume, in the documentation.  */
  expand_label (label);

  /* Put in something that takes up space, as otherwise the end
     address for the EH region could have the exact same address as
     the outer region, causing us to miss the fact that resuming
     exception handling with this PC value would be inside the outer
     region.  */
  emit_insn (gen_nop ());

  push_label_entry (&caught_return_label_stack, NULL_RTX, label);

  /* Start a new sequence for all the catch blocks.  We will add this
     to the global sequence catch_clauses, when we have completed all
     the handlers in this handler-seq.  */
  start_sequence ();

  while (1)
    {
      rtx prev;

      entry = dequeue_eh_entry (&ehqueue);
      emit_label (entry->exception_handler_label);

      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);

      /* When we get down to the matching entry, stop.  */
      if (entry->finalization == integer_zero_node)
	break;

      /* Handlers that can fall off their end rethrow to the
	 enclosing context, unless they already end in a barrier.  */
      prev = get_last_insn ();
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  /* The below can be optimized away, and we could just fall into the
	     next EH handler, if we are certain they are nested.  */
	  /* Code to throw out to outer context, if we fall off end of the
	     handler.  */
	  expand_internal_throw (entry->end_label);
	}

      free (entry);
    }
}
563 | ||
/* Generate RTL for the end of all the catch blocks.  Appends a
   rethrow for control falling off the end of the handlers, closes
   the sequence opened by expand_start_all_catch, pops the resume
   label, and adds the finished sequence to catch_clauses.  */

void
expand_end_all_catch ()
{
  rtx new_catch_clause;

  if (! doing_eh (1))
    return;

  /* Code to throw out to outer context, if we fall off end of catch
     handlers.  This is rethrow (Lresume, same id, same obj); in the
     documentation.  */
  expand_internal_throw (DECL_RTL (top_label_entry (&caught_return_label_stack)));

  /* Now we have the complete catch sequence.  */
  new_catch_clause = get_insns ();
  end_sequence ();

  /* This level of catch blocks is done, so set up the successful
     catch jump label for the next layer of catch blocks.  */
  pop_label_entry (&caught_return_label_stack);

  /* Add the new sequence of catches to the main one for this function.  */
  push_to_sequence (catch_clauses);
  emit_insns (new_catch_clause);
  catch_clauses = get_insns ();
  end_sequence ();

  /* Here we fall through into the continuation code.  */
}
595 | ||
596 | /* End all the pending exception regions from protect_list that have | |
597 | been started, but not yet completed. */ | |
598 | ||
599 | void | |
600 | end_protect_partials () | |
601 | { | |
602 | while (protect_list) | |
603 | { | |
604 | expand_eh_region_end (TREE_VALUE (protect_list)); | |
605 | protect_list = TREE_CHAIN (protect_list); | |
606 | } | |
607 | } | |
608 | \f | |
/* The exception table that we build that is used for looking up and
   dispatching exceptions, its size, and its maximum size before we
   have to extend it.  */
612 | static int *eh_table; | |
613 | static int eh_table_size; | |
614 | static int eh_table_max_size; | |
615 | ||
/* Note the need for an exception table entry for region N.  If we
   don't need to output an explicit exception table, avoid all the
   extra work.  Called during final_scan_insn time.  The table grows
   by half its current size each time it fills.  */

void
add_eh_table_entry (n)
     int n;
{
#ifndef OMIT_EH_TABLE
  if (eh_table_size >= eh_table_max_size)
    {
      if (eh_table)
	{
	  /* Grow by 50%.  */
	  eh_table_max_size += eh_table_max_size>>1;

	  /* Guard against signed overflow of the capacity.  */
	  if (eh_table_max_size < 0)
	    abort ();

	  if ((eh_table = (int *) realloc (eh_table,
					   eh_table_max_size * sizeof (int)))
	      == 0)
	    fatal ("virtual memory exhausted");
	}
      else
	{
	  /* First entry: start with room for 252 regions.  */
	  eh_table_max_size = 252;
	  eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
	}
    }
  eh_table[eh_table_size++] = n;
#endif
}
648 | ||
649 | /* Conditional to test to see if we need to output an exception table. | |
650 | Note, on some platforms, we don't have to output a table | |
651 | explicitly. This routine doesn't mean we don't have one. */ | |
652 | ||
653 | int | |
654 | exception_table_p () | |
655 | { | |
656 | if (eh_table) | |
657 | return 1; | |
658 | ||
659 | return 0; | |
660 | } | |
661 | ||
662 | /* Output an entry N for the exception table to the specified FILE. */ | |
663 | ||
664 | static void | |
665 | output_exception_table_entry (file, n) | |
666 | FILE *file; | |
667 | int n; | |
668 | { | |
669 | char buf[256]; | |
670 | rtx sym; | |
671 | ||
672 | ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n); | |
673 | sym = gen_rtx (SYMBOL_REF, Pmode, buf); | |
674 | assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1); | |
675 | ||
676 | ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n); | |
677 | sym = gen_rtx (SYMBOL_REF, Pmode, buf); | |
678 | assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1); | |
679 | ||
680 | ASM_GENERATE_INTERNAL_LABEL (buf, "L", n); | |
681 | sym = gen_rtx (SYMBOL_REF, Pmode, buf); | |
682 | assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1); | |
683 | ||
684 | putc ('\n', file); /* blank line */ | |
685 | } | |
686 | ||
687 | /* Output the exception table if we have one and need one. */ | |
688 | ||
689 | void | |
690 | output_exception_table () | |
691 | { | |
692 | int i; | |
693 | extern FILE *asm_out_file; | |
694 | ||
695 | if (! doing_eh (0)) | |
696 | return; | |
697 | ||
698 | exception_section (); | |
699 | ||
700 | /* Beginning marker for table. */ | |
701 | assemble_align (GET_MODE_ALIGNMENT (ptr_mode)); | |
702 | assemble_label ("__EXCEPTION_TABLE__"); | |
703 | ||
704 | assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
705 | assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
706 | assemble_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
707 | putc ('\n', asm_out_file); /* blank line */ | |
708 | ||
709 | for (i = 0; i < eh_table_size; ++i) | |
710 | output_exception_table_entry (asm_out_file, eh_table[i]); | |
711 | ||
712 | free (eh_table); | |
713 | ||
714 | /* Ending marker for table. */ | |
715 | assemble_label ("__EXCEPTION_END__"); | |
716 | assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
717 | assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
718 | assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1); | |
719 | putc ('\n', asm_out_file); /* blank line */ | |
720 | } | |
721 | ||
/* Generate code to initialize the exception table at program startup
   time.  Emits a call to __register_exceptions, passing the address
   of this file's __EXCEPTION_TABLE__.  */

void
register_exception_table ()
{
  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
		     VOIDmode, 1,
		     gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
		     Pmode);
}
733 | \f | |
/* Emit the RTL for the start of the per function unwinder for the
   current function.  Just opens an exception region; the matching
   end (and the unwinder body) is emitted by end_eh_unwinder.  */

void
start_eh_unwinder ()
{
#ifdef DOESNT_NEED_UNWINDER
  /* Targets for which the generic __unwind_function suffices skip
     the per-function unwinder entirely.  */
  if (DOESNT_NEED_UNWINDER)
    return;
#endif

  expand_eh_region_start ();
}
747 | ||
748 | /* Emit the RTL for the end of the per function unwinder for the | |
749 | current function. */ | |
750 | ||
751 | void | |
752 | end_eh_unwinder () | |
753 | { | |
754 | tree expr; | |
755 | rtx return_val_rtx, ret_val, label, end, insns; | |
756 | ||
757 | if (! doing_eh (0)) | |
758 | return; | |
759 | ||
760 | #ifdef DOESNT_NEED_UNWINDER | |
761 | if (DOESNT_NEED_UNWINDER) | |
762 | return; | |
763 | #endif | |
764 | ||
843e8335 MS |
765 | assemble_external (eh_saved_pc); |
766 | ||
4956d07c MS |
767 | expr = make_node (RTL_EXPR); |
768 | TREE_TYPE (expr) = void_type_node; | |
769 | RTL_EXPR_RTL (expr) = const0_rtx; | |
770 | TREE_SIDE_EFFECTS (expr) = 1; | |
771 | start_sequence_for_rtl_expr (expr); | |
772 | ||
773 | ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS, | |
774 | 0, hard_frame_pointer_rtx); | |
775 | return_val_rtx = copy_to_reg (ret_val); | |
776 | ||
777 | return_val_rtx = eh_outer_context (return_val_rtx); | |
778 | ||
779 | emit_move_insn (eh_saved_pc_rtx, return_val_rtx); | |
780 | ||
781 | #ifdef JUMP_TO_THROW | |
782 | emit_move_insn (ret_val, throw_libfunc); | |
783 | #else | |
784 | label = gen_label_rtx (); | |
785 | emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label)); | |
786 | #endif | |
787 | ||
788 | #ifdef RETURN_ADDR_OFFSET | |
789 | return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET); | |
790 | if (return_val_rtx != ret_val) | |
791 | emit_move_insn (ret_val, return_val_rtx); | |
792 | #endif | |
793 | ||
794 | end = gen_label_rtx (); | |
795 | emit_jump (end); | |
796 | ||
797 | RTL_EXPR_SEQUENCE (expr) = get_insns (); | |
798 | end_sequence (); | |
799 | expand_eh_region_end (expr); | |
800 | ||
801 | emit_jump (end); | |
802 | ||
803 | #ifndef JUMP_TO_THROW | |
804 | emit_label (label); | |
805 | emit_throw (); | |
806 | #endif | |
807 | ||
808 | expand_leftover_cleanups (); | |
809 | ||
810 | emit_label (end); | |
811 | } | |
812 | ||
/* Emit the RTL for the per function unwinder for the current
   function, if needed.  Called after all the code that needs unwind
   protection is output.  The region-start insns are spliced in right
   after the function's first insn, so the region encloses the whole
   body; the region end and unwinder code go at the current point.  */

void
emit_unwinder ()
{
  rtx insns;

  /* Build the region-start insns in a detached sequence.  */
  start_sequence ();
  start_eh_unwinder ();
  insns = get_insns ();
  end_sequence ();

  /* Splice them in after the function's first insn.  */
  if (insns)
    emit_insns_after (insns, get_insns ());

  end_eh_unwinder ();
}
832 | ||
833 | /* Scan the current insns and build a list of handler labels. Called | |
834 | after the last exception handling region is added to the current | |
835 | function (when the rtl is almost all built for the current | |
836 | function) and before the jump optimization pass. */ | |
837 | ||
838 | void | |
839 | find_exception_handler_labels () | |
840 | { | |
841 | rtx insn; | |
842 | int max_labelno = max_label_num (); | |
843 | int min_labelno = get_first_label_num (); | |
844 | rtx *labels; | |
845 | ||
846 | exception_handler_labels = NULL_RTX; | |
847 | ||
848 | /* If we aren't doing exception handling, there isn't much to check. */ | |
849 | if (! doing_eh (0)) | |
850 | return; | |
851 | ||
852 | /* First we generate a handy reference to each label. */ | |
853 | ||
854 | labels = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx)); | |
855 | labels -= min_labelno; | |
856 | ||
857 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
858 | { | |
859 | if (GET_CODE (insn) == CODE_LABEL) | |
860 | if (CODE_LABEL_NUMBER (insn) >= min_labelno | |
861 | && CODE_LABEL_NUMBER (insn) < max_labelno) | |
862 | labels[CODE_LABEL_NUMBER (insn)] = insn; | |
863 | } | |
864 | ||
865 | /* Then for each start of a region, we add its label to the list. */ | |
866 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
867 | { | |
868 | if (GET_CODE (insn) == NOTE | |
869 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) | |
870 | { | |
871 | rtx label = NULL_RTX; | |
872 | ||
873 | if (NOTE_BLOCK_NUMBER (insn) >= min_labelno | |
874 | && NOTE_BLOCK_NUMBER (insn) < max_labelno) | |
875 | { | |
876 | label = labels[NOTE_BLOCK_NUMBER (insn)]; | |
877 | ||
878 | if (label) | |
879 | exception_handler_labels | |
880 | = gen_rtx (EXPR_LIST, VOIDmode, | |
881 | label, exception_handler_labels); | |
882 | else | |
883 | warning ("didn't find handler for EH region %d", | |
884 | NOTE_BLOCK_NUMBER (insn)); | |
885 | } | |
886 | else | |
887 | warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn)); | |
888 | } | |
889 | } | |
890 | } | |
891 | ||
/* Do some sanity checking on the exception_handler_labels list.  Can
   be called after find_exception_handler_labels is called to build
   the list of exception handlers for the current function, and before
   we finish processing the current function.  Each listed handler
   must correspond to a live CODE_LABEL insn, and every EH region
   note must have a handler on the list; problems are only warned
   about, never fixed.  */

void
check_exception_handler_labels ()
{
  rtx insn, handler;

  /* If we aren't doing exception handling, there isn't much to check.  */
  if (! doing_eh (0))
    return;

  /* For each handler on the list, find the CODE_LABEL with the same
     number and make sure it is the very insn the list refers to.  */
  for (handler = exception_handler_labels;
       handler;
       handler = XEXP (handler, 1))
    {
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    {
	      if (CODE_LABEL_NUMBER (insn)
		  == CODE_LABEL_NUMBER (XEXP (handler, 0)))
		{
		  if (insn != XEXP (handler, 0))
		    warning ("mismatched handler %d",
			     CODE_LABEL_NUMBER (insn));
		  break;
		}
	    }
	}
      if (insn == NULL_RTX)
	warning ("handler not found %d",
		 CODE_LABEL_NUMBER (XEXP (handler, 0)));
    }

  /* Now go through, and make sure that for each region we have, that we
     have the corresponding label.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG ||
	      NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	{
	  for (handler = exception_handler_labels;
	       handler;
	       handler = XEXP (handler, 1))
	    {
	      if (CODE_LABEL_NUMBER (XEXP (handler, 0))
		  == NOTE_BLOCK_NUMBER (insn))
		break;
	    }
	  if (handler == NULL_RTX)
	    warning ("region exists, no handler %d",
		     NOTE_BLOCK_NUMBER (insn));
	}
    }
}
951 | \f | |
952 | /* This group of functions initializes the exception handling data | |
953 | structures at the start of the compilation, initializes the data | |
954 | structures at the start of a function, saves and restores the | |
955 | exception handling data structures for the start/end of a nested | |
956 | function. */ | |
957 | ||
958 | /* Toplevel initialization for EH things. */ | |
959 | ||
960 | void | |
961 | init_eh () | |
962 | { | |
843e8335 MS |
963 | tree type = build_pointer_type (make_node (VOID_TYPE)); |
964 | ||
965 | eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type); | |
966 | DECL_EXTERNAL (eh_saved_pc) = 1; | |
967 | TREE_PUBLIC (eh_saved_pc) = 1; | |
968 | make_decl_rtl (eh_saved_pc, NULL_PTR, 1); | |
969 | eh_saved_pc_rtx = DECL_RTL (eh_saved_pc); | |
4956d07c MS |
970 | } |
971 | ||
972 | /* Initialize various EH things. */ | |
973 | ||
974 | void | |
975 | init_eh_for_function () | |
976 | { | |
977 | ehstack.top = 0; | |
978 | ehqueue.head = ehqueue.tail = 0; | |
979 | catch_clauses = NULL_RTX; | |
980 | false_label_stack = 0; | |
981 | caught_return_label_stack = 0; | |
982 | protect_list = NULL_TREE; | |
983 | } | |
984 | ||
985 | /* Save various EH things for the current function into the save area | |
986 | denoted by P. */ | |
987 | ||
988 | void | |
989 | save_eh_status (p) | |
990 | struct function *p; | |
991 | { | |
992 | p->ehstack = ehstack; | |
993 | p->ehqueue = ehqueue; | |
994 | p->catch_clauses = catch_clauses; | |
995 | p->false_label_stack = false_label_stack; | |
996 | p->caught_return_label_stack = caught_return_label_stack; | |
997 | p->protect_list = protect_list; | |
998 | ||
999 | init_eh (); | |
1000 | } | |
1001 | ||
1002 | /* Restore various EH things for the current function from the save | |
1003 | area denoted by P. */ | |
1004 | ||
1005 | void | |
1006 | restore_eh_status (p) | |
1007 | struct function *p; | |
1008 | { | |
1009 | protect_list = p->protect_list; | |
1010 | caught_return_label_stack = p->caught_return_label_stack; | |
1011 | false_label_stack = p->false_label_stack; | |
1012 | catch_clauses = p->catch_clauses; | |
1013 | ehqueue = p->ehqueue; | |
1014 | ehstack = p->ehstack; | |
1015 | } | |
1016 | \f | |
1017 | /* This section is for the exception handling specific optimization | |
1018 | pass. First are the internal routines, and then the main | |
1019 | optimization pass. */ | |
1020 | ||
1021 | /* Determine if the given INSN can throw an exception. */ | |
1022 | ||
1023 | static int | |
1024 | can_throw (insn) | |
1025 | rtx insn; | |
1026 | { | |
1027 | /* The only things that can possibly throw are calls. */ | |
1028 | if (GET_CODE (insn) == CALL_INSN) | |
1029 | return 1; | |
1030 | ||
1031 | #ifdef ASYNCH_EXCEPTIONS | |
1032 | /* If we wanted asynchronous exceptions, then everything but NOTEs | |
1033 | and CODE_LABELs could throw. */ | |
1034 | if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL) | |
1035 | return 1; | |
1036 | #endif | |
1037 | ||
1038 | return 0; | |
1039 | } | |
1040 | ||
/* Scan a region, looking for a matching end, and decide if the region
   can be removed.  INSN is the start of the region (its _BEG note), N
   is the region number, and DELETE_OUTER is to note if anything in
   this region can throw.

   Returns the region's matching _END note, so the caller can resume
   scanning after it.  Nested regions are handled by recursing, and
   their throwability is folded into our own DELETE flag via the
   DELETE_OUTER out-parameter.  */

static rtx
scan_region (insn, n, delete_outer)
     rtx insn;
     int n;
     int *delete_outer;
{
  rtx start = insn;

  /* Assume we can delete the region.  */
  int delete = 1;

  insn = NEXT_INSN (insn);

  /* Look for the matching end.  */
  while (! (GET_CODE (insn) == NOTE
	    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
    {
      /* If anything can throw, we can't remove the region.  */
      if (delete && can_throw (insn))
	{
	  delete = 0;
	}

      /* Watch out for and handle nested regions.  The recursive call
	 returns the nested _END note, so advancing past it skips the
	 whole inner region.  */
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	{
	  insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
	}

      insn = NEXT_INSN (insn);
    }

  /* The _BEG/_END NOTEs must match and nest.  */
  if (NOTE_BLOCK_NUMBER (insn) != n)
    abort ();

  /* If anything can throw, we can throw.  */
  if (! delete)
    *delete_outer = 0;
  else
    {
      /* Delete the start and end of the region.  */
      delete_insn (start);
      delete_insn (insn);

      /* Only do this part if we have built the exception handler
	 labels.  */
      if (exception_handler_labels)
	{
	  /* PREV trails the list walk so the matching node can be
	     unlinked in place.  */
	  rtx x, *prev = &exception_handler_labels;

	  /* Find it in the list of handlers.  */
	  for (x = exception_handler_labels; x; x = XEXP (x, 1))
	    {
	      rtx label = XEXP (x, 0);
	      if (CODE_LABEL_NUMBER (label) == n)
		{
		  /* If we are the last reference to the handler,
		     delete it.  */
		  if (--LABEL_NUSES (label) == 0)
		    delete_insn (label);

		  if (optimize)
		    {
		      /* Remove it from the list of exception handler
			 labels, if we are optimizing.  If we are not, then
			 leave it in the list, as we are not really going to
			 remove the region.  */
		      *prev = XEXP (x, 1);
		      XEXP (x, 1) = 0;
		      XEXP (x, 0) = 0;
		    }

		  break;
		}
	      prev = &XEXP (x, 1);
	    }
	}
    }
  return insn;
}
1128 | ||
1129 | /* Perform various interesting optimizations for exception handling | |
1130 | code. | |
1131 | ||
1132 | We find empty exception regions, and remove them. The jump | |
1133 | optimization code will remove the handler if nothing else uses it. */ | |
1134 | ||
1135 | void | |
1136 | exception_optimize () | |
1137 | { | |
1138 | rtx insn, regions = NULL_RTX; | |
1139 | int n; | |
1140 | ||
1141 | /* First remove empty regions. */ | |
1142 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
1143 | { | |
1144 | if (GET_CODE (insn) == NOTE | |
1145 | && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) | |
1146 | { | |
1147 | insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n); | |
1148 | } | |
1149 | } | |
1150 | } |