1 /* Implements exception handling.
2 Copyright (C) 1989, 92-96, 1997 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* An exception is an event that can be signaled from within a
24 function. This event can then be "caught" or "trapped" by the
25 callers of this function. This potentially allows program flow to
26 be transferred to any arbitrary code associated with a function call
27 several levels up the stack.
28
29 The intended use for this mechanism is for signaling "exceptional
30 events" in an out-of-band fashion, hence its name. The C++ language
31 (and many other OO-styled or functional languages) practically
32 requires such a mechanism, as otherwise it becomes very difficult
33 or even impossible to signal failure conditions in complex
34 situations. The traditional C++ example is when an error occurs in
35 the process of constructing an object; without such a mechanism, it
36 is impossible to signal that the error occurs without adding global
37 state variables and error checks around every object construction.
38
39 The act of causing this event to occur is referred to as "throwing
40 an exception". (Alternate terms include "raising an exception" or
41 "signaling an exception".) The term "throw" is used because control
42 is returned to the callers of the function that is signaling the
43 exception, and thus there is the concept of "throwing" the
44 exception up the call stack.
45
46    There are two major codegen options for exception handling: a
47    PC range table approach, and a setjmp/longjmp based scheme. The
48    flag -fsjlj-exceptions selects the setjmp/longjmp approach, which
49    is the default; -fno-sjlj-exceptions selects the PC range table
50    approach. While this is a compile time flag, an entire
51    application must be compiled with the same codegen option. We
52    will first discuss the PC range table approach, and after that
53    we will discuss the setjmp/longjmp
54    based approach.
55
56 It is appropriate to speak of the "context of a throw". This
57 context refers to the address where the exception is thrown from,
58 and is used to determine which exception region will handle the
59 exception.
60
61    Regions of code within a function can be marked such that if one
62    of them contains the context of a throw, control will be passed to a
63 designated "exception handler". These areas are known as "exception
64 regions". Exception regions cannot overlap, but they can be nested
65 to any arbitrary depth. Also, exception regions cannot cross
66 function boundaries.
67
68 Exception handlers can either be specified by the user (which we
69 will call a "user-defined handler") or generated by the compiler
70 (which we will designate as a "cleanup"). Cleanups are used to
71 perform tasks such as destruction of objects allocated on the
72 stack.
73
74 In the current implementation, cleanups are handled by allocating an
75 exception region for the area that the cleanup is designated for,
76 and the handler for the region performs the cleanup and then
77 rethrows the exception to the outer exception region. From the
78 standpoint of the current implementation, there is little
79 distinction made between a cleanup and a user-defined handler, and
80 the phrase "exception handler" can be used to refer to either one
81 equally well. (The section "Future Directions" below discusses how
82 this will change).
83
84 Each object file that is compiled with exception handling contains
85 a static array of exception handlers named __EXCEPTION_TABLE__.
86 Each entry contains the starting and ending addresses of the
87 exception region, and the address of the handler designated for
88 that region.
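
   As an illustration only (the exact layout is fixed by
   output_exception_table below and by the matching code in libgcc2.c;
   the field names here are hypothetical), each entry can be pictured as:

       struct entry
       {
         void *region_start;    -- first address covered by the region
         void *region_end;      -- last address covered by the region
         void *handler;         -- where control is transferred
       };

   and __EXCEPTION_TABLE__ as an array of such entries, terminated by a
   marker entry of all-ones words (see output_exception_table below).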
89
90 If the target does not use the DWARF 2 frame unwind information, at
91 program startup each object file invokes a function named
92 __register_exceptions with the address of its local
93 __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94 is responsible for recording all of the exception regions into one list
95 (which is kept in a static variable named exception_table_list).
96
97 On targets that support crtstuff.c, the unwind information
98 is stored in a section named .eh_frame and the information for the
99 entire shared object or program is registered with a call to
100 __register_frame. On other targets, the information for each
101 translation unit is registered from the file generated by collect2.
102 __register_frame is defined in frame.c, and is responsible for
103 recording all of the unwind regions into one list (which is kept in a
104 static variable named unwind_table_list).
105
106 The function __throw is actually responsible for doing the
107 throw. On machines that have unwind info support, __throw is generated
108    by code in libgcc2.c; otherwise __throw is generated on a
109    per-object-file basis for each source file compiled with
110    -fexceptions by the C++ frontend.  Before __throw is invoked,
111 the current context of the throw needs to be placed in the global
112 variable __eh_pc.
113
114 __throw attempts to find the appropriate exception handler for the
115 PC value stored in __eh_pc by calling __find_first_exception_table_match
116 (which is defined in libgcc2.c). If __find_first_exception_table_match
117 finds a relevant handler, __throw transfers control directly to it.
118
119 If a handler for the context being thrown from can't be found, __throw
120 walks (see Walking the stack below) the stack up the dynamic call chain to
121 continue searching for an appropriate exception handler based upon the
122    caller of the function it last sought an exception handler for.  It
123    stops when either an exception handler is found, or the top of the
124    call chain is reached.
125
126 If no handler is found, an external library function named
127 __terminate is called. If a handler is found, then we restart
128 our search for a handler at the end of the call chain, and repeat
129 the search process, but instead of just walking up the call chain,
130 we unwind the call chain as we walk up it.
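
   Schematically (a sketch only, not the real control flow in libgcc2.c),
   __throw behaves roughly like this:

       pc = __eh_pc;
       while (__find_first_exception_table_match (pc) finds nothing
              and the top of the call chain has not been reached)
         pc = return address of the next frame up;     -- walk only
       if (nothing was found)
         __terminate ();
       restart at __eh_pc, this time unwinding each frame as it is
       walked, and finally jump to the handler that was found.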
131
132 Internal implementation details:
133
134 To associate a user-defined handler with a block of statements, the
135 function expand_start_try_stmts is used to mark the start of the
136 block of statements with which the handler is to be associated
137 (which is known as a "try block"). All statements that appear
138 afterwards will be associated with the try block.
139
140 A call to expand_start_all_catch marks the end of the try block,
141 and also marks the start of the "catch block" (the user-defined
142 handler) associated with the try block.
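
   For example (a sketch of what a frontend might emit; the real calls
   made by any given frontend may differ), a source construct like
   "try { body } catch (...) { handler }" maps onto this file roughly as:

       expand_start_try_stmts ();      -- start of the try block
         ... expand the body ...
       expand_start_all_catch ();      -- end of try, start of handlers
         ... expand the handler ...
       expand_end_all_catch ();        -- end of the catch block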
143
144 This user-defined handler will be invoked for *every* exception
145 thrown with the context of the try block. It is up to the handler
146 to decide whether or not it wishes to handle any given exception,
147 as there is currently no mechanism in this implementation for doing
148 this. (There are plans for conditionally processing an exception
149 based on its "type", which will provide a language-independent
150 mechanism).
151
152 If the handler chooses not to process the exception (perhaps by
153 looking at an "exception type" or some other additional data
154 supplied with the exception), it can fall through to the end of the
155 handler. expand_end_all_catch and expand_leftover_cleanups
156 add additional code to the end of each handler to take care of
157 rethrowing to the outer exception handler.
158
159 The handler also has the option to continue with "normal flow of
160 code", or in other words to resume executing at the statement
161 immediately after the end of the exception region. The variable
162 caught_return_label_stack contains a stack of labels, and jumping
163 to the topmost entry's label via expand_goto will resume normal
164 flow to the statement immediately after the end of the exception
165 region. If the handler falls through to the end, the exception will
166 be rethrown to the outer exception region.
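
   (For instance, a handler that wants to resume normal flow might be
   expanded so that it ends with something like
       expand_goto (top_label_entry (&caught_return_label_stack));
   -- a sketch only; how a frontend actually arranges this is up to it.)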
167
168 The instructions for the catch block are kept as a separate
169 sequence, and will be emitted at the end of the function along with
170 the handlers specified via expand_eh_region_end. The end of the
171 catch block is marked with expand_end_all_catch.
172
173 Any data associated with the exception must currently be handled by
174 some external mechanism maintained in the frontend. For example,
175 the C++ exception mechanism passes an arbitrary value along with
176 the exception, and this is handled in the C++ frontend by using a
177 global variable to hold the value. (This will be changing in the
178 future.)
179
180 The mechanism in C++ for handling data associated with the
181 exception is clearly not thread-safe. For a thread-based
182 environment, another mechanism must be used (possibly using a
183 per-thread allocation mechanism if the size of the area that needs
184 to be allocated isn't known at compile time.)
185
186 Internally-generated exception regions (cleanups) are marked by
187 calling expand_eh_region_start to mark the start of the region,
188 and expand_eh_region_end (handler) is used to both designate the
189 end of the region and to associate a specified handler/cleanup with
190 the region. The rtl code in HANDLER will be invoked whenever an
191 exception occurs in the region between the calls to
192 expand_eh_region_start and expand_eh_region_end. After HANDLER is
193 executed, additional code is emitted to handle rethrowing the
194 exception to the outer exception handler. The code for HANDLER will
195 be emitted at the end of the function.
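
   A minimal sketch of such a cleanup region (BODY and CLEANUP are
   placeholders, not real identifiers):

       expand_eh_region_start ();
         ... emit the insns for BODY ...
       expand_eh_region_end (CLEANUP);

   If anything in BODY throws an exception, CLEANUP is run and the
   exception is then rethrown to the outer region.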
196
197 TARGET_EXPRs can also be used to designate exception regions. A
198 TARGET_EXPR gives an unwind-protect style interface commonly used
199 in functional languages such as LISP. The associated expression is
200 evaluated, and whether or not it (or any of the functions that it
201 calls) throws an exception, the protect expression is always
202 invoked. This implementation takes care of the details of
203 associating an exception table entry with the expression and
204 generating the necessary code (it actually emits the protect
205 expression twice, once for normal flow and once for the exception
206 case). As for the other handlers, the code for the exception case
207 will be emitted at the end of the function.
208
209 Cleanups can also be specified by using add_partial_entry (handler)
210 and end_protect_partials. add_partial_entry creates the start of
211 a new exception region; HANDLER will be invoked if an exception is
212 thrown with the context of the region between the calls to
213 add_partial_entry and end_protect_partials. end_protect_partials is
214 used to mark the end of these regions. add_partial_entry can be
215 called as many times as needed before calling end_protect_partials.
216 However, end_protect_partials should only be invoked once for each
217 group of calls to add_partial_entry as the entries are queued
218 and all of the outstanding entries are processed simultaneously
219 when end_protect_partials is invoked. Similarly to the other
220 handlers, the code for HANDLER will be emitted at the end of the
221 function.
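
   Sketched out (A_CLEANUP and B_CLEANUP are placeholder trees, not
   real identifiers), that usage pattern looks like:

       add_partial_entry (A_CLEANUP);    -- opens region 1
         ... code protected by A_CLEANUP ...
       add_partial_entry (B_CLEANUP);    -- opens region 2, nested in 1
         ... code protected by both cleanups ...
       end_protect_partials ();          -- closes both queued regions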
222
223 The generated RTL for an exception region includes
224 NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225 the start and end of the exception region. A unique label is also
226 generated at the start of the exception region, which is available
227 by looking at the ehstack variable. The topmost entry corresponds
228 to the current region.
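
   Schematically (a sketch; expand_eh_region_start_for_decl and
   expand_eh_region_end below emit these notes), the RTL for one
   region is:

       NOTE_INSN_EH_REGION_BEG     (NOTE_BLOCK_NUMBER is the handler's
                                    CODE_LABEL_NUMBER)
         ... insns of the region ...
       NOTE_INSN_EH_REGION_END     (same number as the BEG note)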
229
230 In the current implementation, an exception can only be thrown from
231 a function call (since the mechanism used to actually throw an
232 exception involves calling __throw). If an exception region is
233 created but no function calls occur within that region, the region
234 can be safely optimized away (along with its exception handlers)
235 since no exceptions can ever be caught in that region. This
236 optimization is performed unless -fasynchronous-exceptions is
237 given. If the user wishes to throw from a signal handler, or other
238 asynchronous place, -fasynchronous-exceptions should be used when
239 compiling for maximally correct code, at the cost of additional
240 exception regions. Using -fasynchronous-exceptions only produces
241 code that is reasonably safe in such situations, but a correct
242 program cannot rely upon this working. It can be used in failsafe
243 code, where trying to continue on, and proceeding with potentially
244 incorrect results is better than halting the program.
245
246
247 Walking the stack:
248
249 The stack is walked by starting with a pointer to the current
250    frame, and finding the pointer to the caller's frame.  The unwind info
251 tells __throw how to find it.
252
253 Unwinding the stack:
254
255 When we use the term unwinding the stack, we mean undoing the
256 effects of the function prologue in a controlled fashion so that we
257 still have the flow of control. Otherwise, we could just return
258 (jump to the normal end of function epilogue).
259
260 This is done in __throw in libgcc2.c when we know that a handler exists
261 in a frame higher up the call stack than its immediate caller.
262
263 To unwind, we find the unwind data associated with the frame, if any.
264 If we don't find any, we call the library routine __terminate. If we do
265 find it, we use the information to copy the saved register values from
266 that frame into the register save area in the frame for __throw, return
267 into a stub which updates the stack pointer, and jump to the handler.
268 The normal function epilogue for __throw handles restoring the saved
269 values into registers.
270
271 When unwinding, we use this method if we know it will
272 work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
273 an inline unwinder will have been emitted for any function that
274 __unwind_function cannot unwind. The inline unwinder appears as a
275 normal exception handler for the entire function, for any function
276 that we know cannot be unwound by __unwind_function. We inform the
277 compiler of whether a function can be unwound with
278 __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
279 when the unwinder isn't needed. __unwind_function is used as an
280 action of last resort. If no other method can be used for
281 unwinding, __unwind_function is used. If it cannot unwind, it
282 should call __terminate.
283
284 By default, if the target-specific backend doesn't supply a definition
285 for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286 unwinders will be used instead. The main tradeoff here is in text space
287 utilization. Obviously, if inline unwinders have to be generated
288 repeatedly, this uses much more space than if a single routine is used.
289
290 However, it is simply not possible on some platforms to write a
291 generalized routine for doing stack unwinding without having some
292 form of additional data associated with each function. The current
293 implementation can encode this data in the form of additional
294    machine instructions or as static data in tabular form.  The latter
295 is called the unwind data.
296
297 The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298 or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299 defined and has a non-zero value, a per-function unwinder is not emitted
300 for the current function. If the static unwind data is supported, then
301 a per-function unwinder is not emitted.
302
303 On some platforms it is possible that neither __unwind_function
304 nor inlined unwinders are available. For these platforms it is not
305 possible to throw through a function call, and abort will be
306 invoked instead of performing the throw.
307
308 The reason the unwind data may be needed is that on some platforms
309 the order and types of data stored on the stack can vary depending
310 on the type of function, its arguments and returned values, and the
311 compilation options used (optimization versus non-optimization,
312 -fomit-frame-pointer, processor variations, etc).
313
314 Unfortunately, this also means that throwing through functions that
315 aren't compiled with exception handling support will still not be
316 possible on some platforms. This problem is currently being
317 investigated, but no solutions have been found that do not imply
318 some unacceptable performance penalties.
319
320 Future directions:
321
322 Currently __throw makes no differentiation between cleanups and
323 user-defined exception regions. While this makes the implementation
324 simple, it also implies that it is impossible to determine if a
325 user-defined exception handler exists for a given exception without
326 completely unwinding the stack in the process. This is undesirable
327 from the standpoint of debugging, as ideally it would be possible
328 to trap unhandled exceptions in the debugger before the process of
329 unwinding has even started.
330
331 This problem can be solved by marking user-defined handlers in a
332 special way (probably by adding additional bits to exception_table_list).
333 A two-pass scheme could then be used by __throw to iterate
334 through the table. The first pass would search for a relevant
335 user-defined handler for the current context of the throw, and if
336 one is found, the second pass would then invoke all needed cleanups
337 before jumping to the user-defined handler.
338
339 Many languages (including C++ and Ada) make execution of a
340 user-defined handler conditional on the "type" of the exception
341 thrown. (The type of the exception is actually the type of the data
342 that is thrown with the exception.) It will thus be necessary for
343 __throw to be able to determine if a given user-defined
344 exception handler will actually be executed, given the type of
345 exception.
346
347 One scheme is to add additional information to exception_table_list
348 as to the types of exceptions accepted by each handler. __throw
349 can do the type comparisons and then determine if the handler is
350 actually going to be executed.
351
352 There is currently no significant level of debugging support
353 available, other than to place a breakpoint on __throw. While
354 this is sufficient in most cases, it would be helpful to be able to
355 know where a given exception was going to be thrown to before it is
356 actually thrown, and to be able to choose between stopping before
357 every exception region (including cleanups), or just user-defined
358 exception regions. This should be possible to do in the two-pass
359 scheme by adding additional labels to __throw for appropriate
360 breakpoints, and additional debugger commands could be added to
361 query various state variables to determine what actions are to be
362 performed next.
363
364 Another major problem that is being worked on is the issue with stack
365 unwinding on various platforms. Currently the only platforms that have
366 support for the generation of a generic unwinder are the SPARC and MIPS.
367 All other ports require per-function unwinders, which produce large
368 amounts of code bloat.
369
370 For setjmp/longjmp based exception handling, some of the details
371    are as above, but there are additional details, which this
372    section discusses.
373
374 We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
375 optimize EH regions yet. We don't have to worry about machine
376 specific issues with unwinding the stack, as we rely upon longjmp
377 for all the machine specific details. There is no variable context
378 of a throw, just the one implied by the dynamic handler stack
379 pointed to by the dynamic handler chain. There is no exception
380 table, and no calls to __register_exceptions. __sjthrow is used
381 instead of __throw, and it works by using the dynamic handler
382 chain, and longjmp. -fasynchronous-exceptions has no effect, as
383 the elimination of trivial exception regions is not yet performed.
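
   Each element on the dynamic handler chain can be pictured roughly as
   (a sketch; the real layout is whatever start_dynamic_handler below
   sets up):

       word 0         link to the previous (outer) handler element
       word 1         head of this element's dynamic cleanup chain
       words 2 ...    the setjmp (or __builtin_setjmp) buffer itself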
384
385 A frontend can set protect_cleanup_actions_with_terminate when all
386 the cleanup actions should be protected with an EH region that
387    calls terminate when an unhandled exception is thrown.  C++ does
388 this, Ada does not. */
389
390
391 #include "config.h"
392 #include "defaults.h"
393 #include <stdio.h>
394 #include "rtl.h"
395 #include "tree.h"
396 #include "flags.h"
397 #include "except.h"
398 #include "function.h"
399 #include "insn-flags.h"
400 #include "expr.h"
401 #include "insn-codes.h"
402 #include "regs.h"
403 #include "hard-reg-set.h"
404 #include "insn-config.h"
405 #include "recog.h"
406 #include "output.h"
407
408 /* One to use setjmp/longjmp method of generating code for exception
409 handling. */
410
411 int exceptions_via_longjmp = 2;
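/* (An initial value of 2 appears to be just an "unset" marker; the
   actual default is chosen elsewhere before this flag is used.)  */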
412
413 /* One to enable asynchronous exception support. */
414
415 int asynchronous_exceptions = 0;
416
417 /* One to protect cleanup actions with a handler that calls
418 __terminate, zero otherwise. */
419
420 int protect_cleanup_actions_with_terminate;
421
422 /* A list of labels used for exception handlers. Created by
423 find_exception_handler_labels for the optimization passes. */
424
425 rtx exception_handler_labels;
426
427 /* Nonzero means that __throw was invoked.
428
429 This is used by the C++ frontend to know if code needs to be emitted
430 for __throw or not. */
431
432 int throw_used;
433
434 /* The EH context. Nonzero if the function has already
435 fetched a pointer to the EH context for exception handling. */
436
437 rtx current_function_ehc;
438
439 /* A stack used for keeping track of the currently active exception
440 handling region. As each exception region is started, an entry
441 describing the region is pushed onto this stack. The current
442 region can be found by looking at the top of the stack, and as we
443 exit regions, the corresponding entries are popped.
444
445 Entries cannot overlap; they can be nested. So there is only one
446 entry at most that corresponds to the current instruction, and that
447 is the entry on the top of the stack. */
448
449 static struct eh_stack ehstack;
450
451 /* A queue used for tracking which exception regions have closed but
452 whose handlers have not yet been expanded. Regions are emitted in
453 groups in an attempt to improve paging performance.
454
455 As we exit a region, we enqueue a new entry. The entries are then
456 dequeued during expand_leftover_cleanups and expand_start_all_catch,
457
458 We should redo things so that we either take RTL for the handler,
459 or we expand the handler expressed as a tree immediately at region
460 end time. */
461
462 static struct eh_queue ehqueue;
463
464 /* Insns for all of the exception handlers for the current function.
465 They are currently emitted by the frontend code. */
466
467 rtx catch_clauses;
468
469 /* A TREE_CHAINed list of handlers for regions that are not yet
470 closed. The TREE_VALUE of each entry contains the handler for the
471 corresponding entry on the ehstack. */
472
473 static tree protect_list;
474
475 /* Stacks to keep track of various labels. */
476
477 /* Keeps track of the label to resume to should one want to resume
478 normal control flow out of a handler (instead of, say, returning to
479 the caller of the current function or exiting the program). */
480
481 struct label_node *caught_return_label_stack = NULL;
482
483 /* Keeps track of the label used as the context of a throw to rethrow an
484 exception to the outer exception region. */
485
486 struct label_node *outer_context_label_stack = NULL;
487
488 /* A random data area for the front end's own use. */
489
490 struct label_node *false_label_stack = NULL;
491
492 rtx expand_builtin_return_addr PROTO((enum built_in_function, int, rtx));
493 static void expand_rethrow PROTO((rtx));
494
495 \f
496 /* Various support routines to manipulate the various data structures
497 used by the exception handling code. */
498
499 /* Push a label entry onto the given STACK. */
500
501 void
502 push_label_entry (stack, rlabel, tlabel)
503 struct label_node **stack;
504 rtx rlabel;
505 tree tlabel;
506 {
507 struct label_node *newnode
508 = (struct label_node *) xmalloc (sizeof (struct label_node));
509
510 if (rlabel)
511 newnode->u.rlabel = rlabel;
512 else
513 newnode->u.tlabel = tlabel;
514 newnode->chain = *stack;
515 *stack = newnode;
516 }
517
518 /* Pop a label entry from the given STACK. */
519
520 rtx
521 pop_label_entry (stack)
522 struct label_node **stack;
523 {
524 rtx label;
525 struct label_node *tempnode;
526
527 if (! *stack)
528 return NULL_RTX;
529
530 tempnode = *stack;
531 label = tempnode->u.rlabel;
532 *stack = (*stack)->chain;
533 free (tempnode);
534
535 return label;
536 }
537
538 /* Return the top element of the given STACK. */
539
540 tree
541 top_label_entry (stack)
542 struct label_node **stack;
543 {
544 if (! *stack)
545 return NULL_TREE;
546
547 return (*stack)->u.tlabel;
548 }
549
550 /* Make a copy of ENTRY using xmalloc to allocate the space. */
551
552 static struct eh_entry *
553 copy_eh_entry (entry)
554 struct eh_entry *entry;
555 {
556 struct eh_entry *newentry;
557
558 newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
559 bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
560
561 return newentry;
562 }
563
564 /* Push a new eh_node entry onto STACK. */
565
566 static void
567 push_eh_entry (stack)
568 struct eh_stack *stack;
569 {
570 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
571 struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
572
573 entry->outer_context = gen_label_rtx ();
574 entry->exception_handler_label = gen_label_rtx ();
575 entry->finalization = NULL_TREE;
576
577 node->entry = entry;
578 node->chain = stack->top;
579 stack->top = node;
580 }
581
582 /* Pop an entry from the given STACK. */
583
584 static struct eh_entry *
585 pop_eh_entry (stack)
586 struct eh_stack *stack;
587 {
588 struct eh_node *tempnode;
589 struct eh_entry *tempentry;
590
591 tempnode = stack->top;
592 tempentry = tempnode->entry;
593 stack->top = stack->top->chain;
594 free (tempnode);
595
596 return tempentry;
597 }
598
599 /* Enqueue an ENTRY onto the given QUEUE. */
600
601 static void
602 enqueue_eh_entry (queue, entry)
603 struct eh_queue *queue;
604 struct eh_entry *entry;
605 {
606 struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
607
608 node->entry = entry;
609 node->chain = NULL;
610
611 if (queue->head == NULL)
612 {
613 queue->head = node;
614 }
615 else
616 {
617 queue->tail->chain = node;
618 }
619 queue->tail = node;
620 }
621
622 /* Dequeue an entry from the given QUEUE. */
623
624 static struct eh_entry *
625 dequeue_eh_entry (queue)
626 struct eh_queue *queue;
627 {
628 struct eh_node *tempnode;
629 struct eh_entry *tempentry;
630
631 if (queue->head == NULL)
632 return NULL;
633
634 tempnode = queue->head;
635 queue->head = queue->head->chain;
636
637 tempentry = tempnode->entry;
638 free (tempnode);
639
640 return tempentry;
641 }
642 \f
643 /* Routine to see if exception handling is turned on.
644 DO_WARN is non-zero if we want to inform the user that exception
645 handling is turned off.
646
647 This is used to ensure that -fexceptions has been specified if the
648 compiler tries to use any exception-specific functions. */
649
650 int
651 doing_eh (do_warn)
652 int do_warn;
653 {
654 if (! flag_exceptions)
655 {
656 static int warned = 0;
657 if (! warned && do_warn)
658 {
659 error ("exception handling disabled, use -fexceptions to enable");
660 warned = 1;
661 }
662 return 0;
663 }
664 return 1;
665 }
666
667 /* Given a return address in ADDR, determine the address we should use
668 to find the corresponding EH region. */
669
670 rtx
671 eh_outer_context (addr)
672 rtx addr;
673 {
674 /* First mask out any unwanted bits. */
675 #ifdef MASK_RETURN_ADDR
676 expand_and (addr, MASK_RETURN_ADDR, addr);
677 #endif
678
679 /* Then adjust to find the real return address. */
680 #if defined (RETURN_ADDR_OFFSET)
681 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
682 #endif
683
684 return addr;
685 }
686
687 /* Start a new exception region for a region of code that has a
688 cleanup action and push the HANDLER for the region onto
689 protect_list. All of the regions created with add_partial_entry
690 will be ended when end_protect_partials is invoked. */
691
692 void
693 add_partial_entry (handler)
694 tree handler;
695 {
696 expand_eh_region_start ();
697
698 /* Make sure the entry is on the correct obstack. */
699 push_obstacks_nochange ();
700 resume_temporary_allocation ();
701
702 /* Because this is a cleanup action, we may have to protect the handler
703 with __terminate. */
704 handler = protect_with_terminate (handler);
705
706 protect_list = tree_cons (NULL_TREE, handler, protect_list);
707 pop_obstacks ();
708 }
709
710 /* Emit code to get the EH context for the current function. */
711
712 static rtx
713 call_get_eh_context ()
714 {
715 static tree fn;
716 tree expr;
717
718 if (fn == NULL_TREE)
719 {
720 tree fntype;
721 fn = get_identifier ("__get_eh_context");
722 push_obstacks_nochange ();
723 end_temporary_allocation ();
724 fntype = build_pointer_type (build_pointer_type
725 (build_pointer_type (void_type_node)));
726 fntype = build_function_type (fntype, NULL_TREE);
727 fn = build_decl (FUNCTION_DECL, fn, fntype);
728 DECL_EXTERNAL (fn) = 1;
729 TREE_PUBLIC (fn) = 1;
730 DECL_ARTIFICIAL (fn) = 1;
731 TREE_READONLY (fn) = 1;
732 make_decl_rtl (fn, NULL_PTR, 1);
733 assemble_external (fn);
734 pop_obstacks ();
735 }
736
737 expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
738 expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
739 expr, NULL_TREE, NULL_TREE);
740 TREE_SIDE_EFFECTS (expr) = 1;
741
742 return copy_to_reg (expand_expr (expr, NULL_RTX, VOIDmode, 0));
743 }
744
745 /* Get a reference to the EH context.
746    We only generate a register for the current function's EH context here,
747    and emit a USE insn to mark that it is an EH context register.
748
749    Later, emit_eh_context will emit the needed call to __get_eh_context
750    in libgcc2, and copy the value into the register we have generated. */
751
752 rtx
753 get_eh_context ()
754 {
755 if (current_function_ehc == 0)
756 {
757 rtx insn;
758
759 current_function_ehc = gen_reg_rtx (Pmode);
760
761 insn = gen_rtx (USE,
762 GET_MODE (current_function_ehc),
763 current_function_ehc);
764 insn = emit_insn_before (insn, get_first_nonparm_insn ());
765
766 REG_NOTES (insn)
767 = gen_rtx (EXPR_LIST,
768 REG_EH_CONTEXT, current_function_ehc,
769 REG_NOTES (insn));
770 }
771 return current_function_ehc;
772 }
773
774 /* Get a reference to the dynamic handler chain. It points to the
775 pointer to the next element in the dynamic handler chain. It ends
776    when there are no more elements in the dynamic handler chain, i.e.
777    when the value is &top_elt from libgcc2.c.  Immediately after the
778    pointer is an area suitable for setjmp/longjmp when
779    DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
780    __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
781    isn't defined. */
782
783 rtx
784 get_dynamic_handler_chain ()
785 {
786 rtx ehc, dhc, result;
787
788 ehc = get_eh_context ();
789 dhc = ehc;
790
791 result = copy_to_reg (dhc);
792
793   /* We don't want a copy of the dhc, but rather, the single dhc. */
794 return gen_rtx (MEM, Pmode, result);
795 }
796
797 /* Get a reference to the dynamic cleanup chain. It points to the
798 pointer to the next element in the dynamic cleanup chain.
799    Immediately after the pointer are two Pmode variables: one is a
800    pointer to a function that performs the cleanup action, and the
801    second is the argument to pass to that function. */
802
803 rtx
804 get_dynamic_cleanup_chain ()
805 {
806 rtx dhc, dcc, result;
807
808 dhc = get_dynamic_handler_chain ();
809 dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
810
811 result = copy_to_reg (dcc);
812
813 /* We don't want a copy of the dcc, but rather, the single dcc. */
814 return gen_rtx (MEM, Pmode, result);
815 }
816
817 /* Generate code to evaluate X and jump to LABEL if the value is nonzero.
818 LABEL is an rtx of code CODE_LABEL, in this function. */
819
820 void
821 jumpif_rtx (x, label)
822 rtx x;
823 rtx label;
824 {
825 jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
826 }
827
828 /* Generate code to evaluate X and jump to LABEL if the value is zero.
829 LABEL is an rtx of code CODE_LABEL, in this function. */
830
831 void
832 jumpifnot_rtx (x, label)
833 rtx x;
834 rtx label;
835 {
836 jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
837 }
838
839 /* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
840 We just need to create an element for the cleanup list, and push it
841 into the chain.
842
843 A dynamic cleanup is a cleanup action implied by the presence of an
844 element on the EH runtime dynamic cleanup stack that is to be
845 performed when an exception is thrown. The cleanup action is
846 performed by __sjthrow when an exception is thrown. Only certain
847 actions can be optimized into dynamic cleanup actions. For the
848 restrictions on what actions can be performed using this routine,
849 see expand_eh_region_start_tree. */
850
851 static void
852 start_dynamic_cleanup (func, arg)
853 tree func;
854 tree arg;
855 {
856 rtx dhc, dcc;
857 rtx new_func, new_arg;
858 rtx x, buf;
859 int size;
860
861 /* We allocate enough room for a pointer to the function, and
862 one argument. */
863 size = 2;
864
865 /* XXX, FIXME: The stack space allocated this way is too long lived,
866 but there is no allocation routine that allocates at the level of
867 the last binding contour. */
868 buf = assign_stack_local (BLKmode,
869 GET_MODE_SIZE (Pmode)*(size+1),
870 0);
871
872 buf = change_address (buf, Pmode, NULL_RTX);
873
874 /* Store dcc into the first word of the newly allocated buffer. */
875
876 dcc = get_dynamic_cleanup_chain ();
877 emit_move_insn (buf, dcc);
878
879 /* Store func and arg into the cleanup list element. */
880
881 new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
882 GET_MODE_SIZE (Pmode)));
883 new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
884 GET_MODE_SIZE (Pmode)*2));
885 x = expand_expr (func, new_func, Pmode, 0);
886 if (x != new_func)
887 emit_move_insn (new_func, x);
888
889 x = expand_expr (arg, new_arg, Pmode, 0);
890 if (x != new_arg)
891 emit_move_insn (new_arg, x);
892
893 /* Update the cleanup chain. */
894
895 emit_move_insn (dcc, XEXP (buf, 0));
896 }
897
898 /* Emit RTL to start a dynamic handler on the EH runtime dynamic
899 handler stack. This should only be used by expand_eh_region_start
900 or expand_eh_region_start_tree. */
901
902 static void
903 start_dynamic_handler ()
904 {
905 rtx dhc, dcc;
906 rtx x, arg, buf;
907 int size;
908
909 #ifndef DONT_USE_BUILTIN_SETJMP
910 /* The number of Pmode words for the setjmp buffer, when using the
911 builtin setjmp/longjmp, see expand_builtin, case
912 BUILT_IN_LONGJMP. */
913 size = 5;
914 #else
915 #ifdef JMP_BUF_SIZE
916 size = JMP_BUF_SIZE;
917 #else
918 /* Should be large enough for most systems; if it is not,
919    JMP_BUF_SIZE should be defined with the proper value.  It will
920    also tend to be larger than necessary for most systems; a more
921    optimal port will define JMP_BUF_SIZE. */
922 size = FIRST_PSEUDO_REGISTER+2;
923 #endif
924 #endif
925 /* XXX, FIXME: The stack space allocated this way is too long lived,
926 but there is no allocation routine that allocates at the level of
927 the last binding contour. */
928 arg = assign_stack_local (BLKmode,
929 GET_MODE_SIZE (Pmode)*(size+1),
930 0);
931
932 arg = change_address (arg, Pmode, NULL_RTX);
933
934 /* Store dhc into the first word of the newly allocated buffer. */
935
936 dhc = get_dynamic_handler_chain ();
937 dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
938 GET_MODE_SIZE (Pmode)));
939 emit_move_insn (arg, dhc);
940
941 /* Zero out the start of the cleanup chain. */
942 emit_move_insn (dcc, const0_rtx);
943
944 /* The jmpbuf starts two words into the area allocated. */
945 buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
946
947 #ifdef DONT_USE_BUILTIN_SETJMP
948 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
949 buf, Pmode);
950 #else
951 x = expand_builtin_setjmp (buf, NULL_RTX);
952 #endif
953
954 /* If we come back here for a catch, transfer control to the
955 handler. */
956
957 jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
958
959 /* We are committed to this, so update the handler chain. */
960
961 emit_move_insn (dhc, XEXP (arg, 0));
962 }
963
964 /* Start an exception handling region for the given cleanup action.
965 All instructions emitted after this point are considered to be part
966 of the region until expand_eh_region_end is invoked. CLEANUP is
967 the cleanup action to perform. The return value is true if the
968    exception region was optimized away.  In that case,
969 expand_eh_region_end does not need to be called for this cleanup,
970 nor should it be.
971
972 This routine notices one particular common case in C++ code
973 generation, and optimizes it so as to not need the exception
974    region.  It works by creating a dynamic cleanup action instead
975    of using an exception region. */
976
977 int
978 expand_eh_region_start_tree (decl, cleanup)
979 tree decl;
980 tree cleanup;
981 {
982 rtx note;
983
984 /* This is the old code. */
985 if (! doing_eh (0))
986 return 0;
987
988 /* The optimization only applies to actions protected with
989 terminate, and only applies if we are using the setjmp/longjmp
990 codegen method. */
991 if (exceptions_via_longjmp
992 && protect_cleanup_actions_with_terminate)
993 {
994 tree func, arg;
995 tree args;
996
997 /* Ignore any UNSAVE_EXPR. */
998 if (TREE_CODE (cleanup) == UNSAVE_EXPR)
999 cleanup = TREE_OPERAND (cleanup, 0);
1000
1001 /* Further, it only applies if the action is a call, if there
1002 are 2 arguments, and if the second argument is 2. */
1003
1004 if (TREE_CODE (cleanup) == CALL_EXPR
1005 && (args = TREE_OPERAND (cleanup, 1))
1006 && (func = TREE_OPERAND (cleanup, 0))
1007 && (arg = TREE_VALUE (args))
1008 && (args = TREE_CHAIN (args))
1009
1010 /* is the second argument 2? */
1011 && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
1012 && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
1013 && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
1014
1015 /* Make sure there are no other arguments. */
1016 && TREE_CHAIN (args) == NULL_TREE)
1017 {
1018 /* Arrange for returns and gotos to pop the entry we make on the
1019 dynamic cleanup stack. */
1020 expand_dcc_cleanup (decl);
1021 start_dynamic_cleanup (func, arg);
1022 return 1;
1023 }
1024 }
1025
1026 expand_eh_region_start_for_decl (decl);
1027 ehstack.top->entry->finalization = cleanup;
1028
1029 return 0;
1030 }
1031
1032 /* Just like expand_eh_region_start, except if a cleanup action is
1033 entered on the cleanup chain, the TREE_PURPOSE of the element put
1034 on the chain is DECL. DECL should be the associated VAR_DECL, if
1035 any, otherwise it should be NULL_TREE. */
1036
1037 void
1038 expand_eh_region_start_for_decl (decl)
1039 tree decl;
1040 {
1041 rtx note;
1042
1043 /* This is the old code. */
1044 if (! doing_eh (0))
1045 return;
1046
1047 if (exceptions_via_longjmp)
1048 {
1049 /* We need a new block to record the start and end of the
1050 dynamic handler chain. We could always do this, but we
1051 really want to permit jumping into such a block, and we want
1052 to avoid any errors or performance impact in the SJ EH code
1053 for now. */
1054 expand_start_bindings (0);
1055
1056 /* But we don't need or want a new temporary level. */
1057 pop_temp_slots ();
1058
1059 /* Mark this block as created by expand_eh_region_start. This
1060 is so that we can pop the block with expand_end_bindings
1061 automatically. */
1062 mark_block_as_eh_region ();
1063
1064 /* Arrange for returns and gotos to pop the entry we make on the
1065 dynamic handler stack. */
1066 expand_dhc_cleanup (decl);
1067 }
1068
1069 push_eh_entry (&ehstack);
1070 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
1071 NOTE_BLOCK_NUMBER (note)
1072 = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
1073 if (exceptions_via_longjmp)
1074 start_dynamic_handler ();
1075 }
1076
1077 /* Start an exception handling region. All instructions emitted after
1078 this point are considered to be part of the region until
1079 expand_eh_region_end is invoked. */
1080
1081 void
1082 expand_eh_region_start ()
1083 {
1084 expand_eh_region_start_for_decl (NULL_TREE);
1085 }
1086
1087 /* End an exception handling region. The information about the region
1088 is found on the top of ehstack.
1089
1090 HANDLER is either the cleanup for the exception region, or if we're
1091 marking the end of a try block, HANDLER is integer_zero_node.
1092
1093 HANDLER will be transformed to rtl when expand_leftover_cleanups
1094 is invoked. */
1095
1096 void
1097 expand_eh_region_end (handler)
1098 tree handler;
1099 {
1100 struct eh_entry *entry;
1101 rtx note;
1102
1103 if (! doing_eh (0))
1104 return;
1105
1106 entry = pop_eh_entry (&ehstack);
1107
1108 note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
1109 NOTE_BLOCK_NUMBER (note)
1110 = CODE_LABEL_NUMBER (entry->exception_handler_label);
1111 if (exceptions_via_longjmp == 0
1112 /* We share outer_context between regions; only emit it once. */
1113 && INSN_UID (entry->outer_context) == 0)
1114 {
1115 rtx label;
1116
1117 label = gen_label_rtx ();
1118 emit_jump (label);
1119
1120 /* Emit a label marking the end of this exception region that
1121 is used for rethrowing into the outer context. */
1122 emit_label (entry->outer_context);
1123 expand_internal_throw ();
1124
1125 emit_label (label);
1126 }
1127
1128 entry->finalization = handler;
1129
1130 enqueue_eh_entry (&ehqueue, entry);
1131
1132 /* If we have already started ending the bindings, don't recurse.
1133 This only happens when exceptions_via_longjmp is true. */
1134 if (is_eh_region ())
1135 {
1136 /* Because we don't need or want a new temporary level and
1137 because we didn't create one in expand_eh_region_start,
1138 create a fake one now to avoid removing one in
1139 expand_end_bindings. */
1140 push_temp_slots ();
1141
1142 mark_block_as_not_eh_region ();
1143
1144 /* Maybe do this to prevent jumping in and so on... */
1145 expand_end_bindings (NULL_TREE, 0, 0);
1146 }
1147 }
1148
1149 /* Start an EH region for a goto fixup.  We only need these in the
1150    region-based EH scheme. */
1151
1152 void
1153 expand_fixup_region_start ()
1154 {
1155 if (! doing_eh (0) || exceptions_via_longjmp)
1156 return;
1157
1158 expand_eh_region_start ();
1159 }
1160
1161 /* End the EH region for a goto fixup. CLEANUP is the cleanup we just
1162 expanded; to avoid running it twice if it throws, we look through the
1163 ehqueue for a matching region and rethrow from its outer_context. */
1164
1165 void
1166 expand_fixup_region_end (cleanup)
1167 tree cleanup;
1168 {
1169 struct eh_node *node;
1170
1171 if (! doing_eh (0) || exceptions_via_longjmp)
1172 return;
1173
1174 for (node = ehstack.top; node && node->entry->finalization != cleanup; )
1175 node = node->chain;
1176 if (node == 0)
1177 for (node = ehqueue.head; node && node->entry->finalization != cleanup; )
1178 node = node->chain;
1179 if (node == 0)
1180 abort ();
1181
1182 ehstack.top->entry->outer_context = node->entry->outer_context;
1183
1184 /* Just rethrow. size_zero_node is just a NOP. */
1185 expand_eh_region_end (size_zero_node);
1186 }
1187
1188 /* If we are using the setjmp/longjmp EH codegen method, we emit a
1189 call to __sjthrow.
1190
1191 Otherwise, we emit a call to __throw and note that we threw
1192 something, so we know we need to generate the necessary code for
1193 __throw.
1194
1195 Before invoking throw, the __eh_pc variable must have been set up
1196 to contain the PC being thrown from. This address is used by
1197 __throw to determine which exception region (if any) is
1198 responsible for handling the exception. */
1199
1200 void
1201 emit_throw ()
1202 {
1203 if (exceptions_via_longjmp)
1204 {
1205 emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
1206 }
1207 else
1208 {
1209 #ifdef JUMP_TO_THROW
1210 emit_indirect_jump (throw_libfunc);
1211 #else
1212 emit_library_call (throw_libfunc, 0, VOIDmode, 0);
1213 #endif
1214 throw_used = 1;
1215 }
1216 emit_barrier ();
1217 }
1218
1219 /* Throw the current exception. If appropriate, this is done by jumping
1220 to the next handler. */
1221
1222 void
1223 expand_internal_throw ()
1224 {
1225 emit_throw ();
1226 }
1227
1228 /* Called from expand_exception_blocks and expand_end_catch_block to
1229 emit any pending handlers/cleanups queued from expand_eh_region_end. */
1230
1231 void
1232 expand_leftover_cleanups ()
1233 {
1234 struct eh_entry *entry;
1235
1236 while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
1237 {
1238 rtx prev;
1239
1240 /* A leftover try block. Shouldn't be one here. */
1241 if (entry->finalization == integer_zero_node)
1242 abort ();
1243
1244 /* Output the label for the start of the exception handler. */
1245 emit_label (entry->exception_handler_label);
1246
1247 #ifdef HAVE_exception_receiver
1248 if (! exceptions_via_longjmp)
1249 if (HAVE_exception_receiver)
1250 emit_insn (gen_exception_receiver ());
1251 #endif
1252
1253 #ifdef HAVE_nonlocal_goto_receiver
1254 if (! exceptions_via_longjmp)
1255 if (HAVE_nonlocal_goto_receiver)
1256 emit_insn (gen_nonlocal_goto_receiver ());
1257 #endif
1258
1259 /* And now generate the insns for the handler. */
1260 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1261
1262 prev = get_last_insn ();
1263 if (prev == NULL || GET_CODE (prev) != BARRIER)
1264 /* Emit code to throw to the outer context if we fall off
1265 the end of the handler. */
1266 expand_rethrow (entry->outer_context);
1267
1268 do_pending_stack_adjust ();
1269 free (entry);
1270 }
1271 }
1272
1273 /* Called at the start of a block of try statements. */
1274 void
1275 expand_start_try_stmts ()
1276 {
1277 if (! doing_eh (1))
1278 return;
1279
1280 expand_eh_region_start ();
1281 }
1282
1283 /* Generate RTL for the start of a group of catch clauses.
1284
1285 It is responsible for starting a new instruction sequence for the
1286 instructions in the catch block, and expanding the handlers for the
1287 internally-generated exception regions nested within the try block
1288 corresponding to this catch block. */
1289
1290 void
1291 expand_start_all_catch ()
1292 {
1293 struct eh_entry *entry;
1294 tree label;
1295 rtx outer_context;
1296
1297 if (! doing_eh (1))
1298 return;
1299
1300 outer_context = ehstack.top->entry->outer_context;
1301
1302 /* End the try block. */
1303 expand_eh_region_end (integer_zero_node);
1304
1305 emit_line_note (input_filename, lineno);
1306 label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1307
1308 /* The label for the exception handling block that we will save.
1309 This is Lresume in the documentation. */
1310 expand_label (label);
1311
1312 /* Push the label that points to where normal flow is resumed onto
1313 the top of the label stack. */
1314 push_label_entry (&caught_return_label_stack, NULL_RTX, label);
1315
1316 /* Start a new sequence for all the catch blocks. We will add this
1317 to the global sequence catch_clauses when we have completed all
1318 the handlers in this handler-seq. */
1319 start_sequence ();
1320
1321 while (1)
1322 {
1323 rtx prev;
1324
1325 entry = dequeue_eh_entry (&ehqueue);
1326 /* Emit the label for the exception handler for this region, and
1327 expand the code for the handler.
1328
1329 Note that a catch region is handled as a side-effect here;
1330 for a try block, entry->finalization will contain
1331 integer_zero_node, so no code will be generated in the
1332 expand_expr call below. But, the label for the handler will
1333 still be emitted, so any code emitted after this point will
1334 end up being the handler. */
1335 emit_label (entry->exception_handler_label);
1336
1337 #ifdef HAVE_exception_receiver
1338 if (! exceptions_via_longjmp)
1339 if (HAVE_exception_receiver)
1340 emit_insn (gen_exception_receiver ());
1341 #endif
1342
1343 #ifdef HAVE_nonlocal_goto_receiver
1344 if (! exceptions_via_longjmp)
1345 if (HAVE_nonlocal_goto_receiver)
1346 emit_insn (gen_nonlocal_goto_receiver ());
1347 #endif
1348
1349 /* When we get down to the matching entry for this try block, stop. */
1350 if (entry->finalization == integer_zero_node)
1351 {
1352 /* Don't forget to free this entry. */
1353 free (entry);
1354 break;
1355 }
1356
1357 /* And now generate the insns for the handler. */
1358 expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1359
1360 prev = get_last_insn ();
1361 if (prev == NULL || GET_CODE (prev) != BARRIER)
1362 /* Code to throw out to outer context when we fall off end
1363 of the handler. We can't do this here for catch blocks,
1364 so it's done in expand_end_all_catch instead. */
1365 expand_rethrow (entry->outer_context);
1366
1367 do_pending_stack_adjust ();
1368 free (entry);
1369 }
1370
1371   /* If we are not doing setjmp/longjmp EH, then because the catch
1372      blocks are reordered out of line, we arrange to rethrow in the
1373      outer context.  We need to do this because we are not physically
1374      within the region, if any, that logically contains this catch block. */
1375 if (! exceptions_via_longjmp)
1376 {
1377 expand_eh_region_start ();
1378 ehstack.top->entry->outer_context = outer_context;
1379 }
1380 }
1381
1382 /* Finish up the catch block. At this point all the insns for the
1383 catch clauses have already been generated, so we only have to add
1384 them to the catch_clauses list. We also want to make sure that if
1385 we fall off the end of the catch clauses that we rethrow to the
1386 outer EH region. */
1387
1388 void
1389 expand_end_all_catch ()
1390 {
1391 rtx new_catch_clause, outer_context = NULL_RTX;
1392
1393 if (! doing_eh (1))
1394 return;
1395
1396 if (! exceptions_via_longjmp)
1397 {
1398 outer_context = ehstack.top->entry->outer_context;
1399
1400 /* Finish the rethrow region. size_zero_node is just a NOP. */
1401 expand_eh_region_end (size_zero_node);
1402 }
1403
1404 /* Code to throw out to outer context, if we fall off end of catch
1405 handlers. This is rethrow (Lresume, same id, same obj) in the
1406 documentation. We use Lresume because we know that it will throw
1407 to the correct context.
1408
1409 In other words, if the catch handler doesn't exit or return, we
1410 do a "throw" (using the address of Lresume as the point being
1411 thrown from) so that the outer EH region can then try to process
1412 the exception. */
1413 expand_rethrow (outer_context);
1414
1415 /* Now we have the complete catch sequence. */
1416 new_catch_clause = get_insns ();
1417 end_sequence ();
1418
1419 /* This level of catch blocks is done, so set up the successful
1420 catch jump label for the next layer of catch blocks. */
1421 pop_label_entry (&caught_return_label_stack);
1422 pop_label_entry (&outer_context_label_stack);
1423
1424 /* Add the new sequence of catches to the main one for this function. */
1425 push_to_sequence (catch_clauses);
1426 emit_insns (new_catch_clause);
1427 catch_clauses = get_insns ();
1428 end_sequence ();
1429
1430 /* Here we fall through into the continuation code. */
1431 }
1432
1433 /* Rethrow from the outer context LABEL. */
1434
1435 static void
1436 expand_rethrow (label)
1437 rtx label;
1438 {
1439 if (exceptions_via_longjmp)
1440 emit_throw ();
1441 else
1442 emit_jump (label);
1443 }
1444
1445 /* End all the pending exception regions on protect_list. The handlers
1446 will be emitted when expand_leftover_cleanups is invoked. */
1447
1448 void
1449 end_protect_partials ()
1450 {
1451 while (protect_list)
1452 {
1453 expand_eh_region_end (TREE_VALUE (protect_list));
1454 protect_list = TREE_CHAIN (protect_list);
1455 }
1456 }
1457
1458 /* Arrange for __terminate to be called if there is an unhandled throw
1459 from within E. */
1460
1461 tree
1462 protect_with_terminate (e)
1463 tree e;
1464 {
1465 /* We only need to do this when using setjmp/longjmp EH and the
1466 language requires it, as otherwise we protect all of the handlers
1467 at once, if we need to. */
1468 if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
1469 {
1470 tree handler, result;
1471
1472 /* All cleanups must be on the function_obstack. */
1473 push_obstacks_nochange ();
1474 resume_temporary_allocation ();
1475
1476 handler = make_node (RTL_EXPR);
1477 TREE_TYPE (handler) = void_type_node;
1478 RTL_EXPR_RTL (handler) = const0_rtx;
1479 TREE_SIDE_EFFECTS (handler) = 1;
1480 start_sequence_for_rtl_expr (handler);
1481
1482 emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
1483 emit_barrier ();
1484
1485 RTL_EXPR_SEQUENCE (handler) = get_insns ();
1486 end_sequence ();
1487
1488 result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
1489 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
1490 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
1491 TREE_READONLY (result) = TREE_READONLY (e);
1492
1493 pop_obstacks ();
1494
1495 e = result;
1496 }
1497
1498 return e;
1499 }
1500 \f
1501 /* The exception table that we build that is used for looking up and
1502 dispatching exceptions, the current number of entries, and its
1503 maximum size before we have to extend it.
1504
1505 The number in eh_table is the code label number of the exception
1506 handler for the region. This is added by add_eh_table_entry and
1507 used by output_exception_table_entry. */
1508
1509 static int *eh_table;
1510 static int eh_table_size;
1511 static int eh_table_max_size;
1512
1513 /* Note the need for an exception table entry for region N. If we
1514 don't need to output an explicit exception table, avoid all of the
1515 extra work.
1516
1517 Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1518 N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1519 label number of the exception handler for the region. */
1520
1521 void
1522 add_eh_table_entry (n)
1523 int n;
1524 {
1525 #ifndef OMIT_EH_TABLE
1526 if (eh_table_size >= eh_table_max_size)
1527 {
1528 if (eh_table)
1529 {
1530 eh_table_max_size += eh_table_max_size>>1;
1531
1532 if (eh_table_max_size < 0)
1533 abort ();
1534
1535 eh_table = (int *) xrealloc (eh_table,
1536 eh_table_max_size * sizeof (int));
1537 }
1538 else
1539 {
1540 eh_table_max_size = 252;
1541 eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1542 }
1543 }
1544 eh_table[eh_table_size++] = n;
1545 #endif
1546 }
1547
1548 /* Return a non-zero value if we need to output an exception table.
1549
1550    On some platforms, we don't have to output a table explicitly.
1551    This routine returning zero doesn't mean we don't have one. */
1552
1553 int
1554 exception_table_p ()
1555 {
1556 if (eh_table)
1557 return 1;
1558
1559 return 0;
1560 }
1561
1562 /* 1 if we need a static constructor to register EH table info. */
1563
1564 int
1565 register_exception_table_p ()
1566 {
1567 #if defined (DWARF2_UNWIND_INFO)
1568 return 0;
1569 #endif
1570
1571 return exception_table_p ();
1572 }
1573
1574 /* Output the entry of the exception table corresponding to the
1575 exception region numbered N to file FILE.
1576
1577 N is the code label number corresponding to the handler of the
1578 region. */
1579
1580 static void
1581 output_exception_table_entry (file, n)
1582 FILE *file;
1583 int n;
1584 {
1585 char buf[256];
1586 rtx sym;
1587
1588 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
1589 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1590 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1591
1592 ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
1593 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1594 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1595
1596 ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
1597 sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1598 assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1599
1600 putc ('\n', file); /* blank line */
1601 }
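
/* Purely for illustration: on a hypothetical 32-bit target whose
   internal labels come out as "LEHB<n>", "LEHE<n>" and "L<n>" and whose
   assemble_integer emits ".long", the entry for region 5 would look
   roughly like

        .long   LEHB5           start of region 5
        .long   LEHE5           end of region 5
        .long   L5              handler label for region 5

   The actual label spelling and directives are target-dependent
   (ASM_GENERATE_INTERNAL_LABEL and the target's integer output macros).  */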
1602
1603 /* Output the exception table if we have and need one. */
1604
1605 void
1606 output_exception_table ()
1607 {
1608 int i;
1609 extern FILE *asm_out_file;
1610
1611 if (! doing_eh (0) || ! eh_table)
1612 return;
1613
1614 exception_section ();
1615
1616 /* Beginning marker for table. */
1617 assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
1618 assemble_label ("__EXCEPTION_TABLE__");
1619
1620 for (i = 0; i < eh_table_size; ++i)
1621 output_exception_table_entry (asm_out_file, eh_table[i]);
1622
1623 free (eh_table);
1624
1625 /* Ending marker for table. */
1626 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1627 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1628 assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1629 putc ('\n', asm_out_file); /* blank line */
1630 }
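
/* A sketch of the table this emits, viewed from C; the struct below is
   only illustrative (the runtime's real declaration lives in libgcc,
   not in this file).  */
#if 0
struct eh_table_entry
{
  void *region_start;           /* the LEHBn label */
  void *region_end;             /* the LEHEn label */
  void *handler;                /* the Ln handler label */
};
/* __EXCEPTION_TABLE__ is then an array of such entries, terminated by
   an entry whose three words are all -1.  */
#endif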
1631
1632 /* Generate code to initialize the exception table at program startup
1633 time. */
1634
1635 void
1636 register_exception_table ()
1637 {
1638 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1639 VOIDmode, 1,
1640 gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1641 Pmode);
1642 }
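
/* In C terms, the call emitted above amounts roughly to the sketch
   below; the prototype and the constructor name are assumptions shown
   only for illustration, not declarations this file provides.  */
#if 0
extern void __register_exceptions (void *table);
extern char __EXCEPTION_TABLE__[];      /* the label emitted above */

static void example_eh_startup (void)   /* hypothetical constructor */
{
  __register_exceptions (__EXCEPTION_TABLE__);
}
#endif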
1643 \f
1644 /* Emit code to get EH context.
1645
1646 We have to scan through the code to find possible EH context registers.
1647 Inlined functions may use the EH context too, so we have to be able
1648 to update their uses as well.
1649
1650 This is done only if using exceptions_via_longjmp. */
1651
1652 void
1653 emit_eh_context ()
1654 {
1655 rtx insn;
1656 rtx ehc = 0;
1657
1658 if (! doing_eh (0))
1659 return;
1660
1661 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1662 if (GET_CODE (insn) == INSN
1663 && GET_CODE (PATTERN (insn)) == USE)
1664 {
1665 rtx reg = find_reg_note (insn, REG_EH_CONTEXT, 0);
1666 if (reg)
1667 {
1668 rtx insns;
1669
1670 start_sequence ();
1671
1672 /* If this is the first use insn, emit the call here. */
1673 if (ehc == 0)
1674 ehc = call_get_eh_context ();
1675
1676 emit_move_insn (XEXP (reg, 0), ehc);
1677 insns = get_insns ();
1678 end_sequence ();
1679
1680 emit_insns_before (insns, insn);
1681 }
1682 }
1683 }
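
/* Schematically: for every USE insn that carries a REG_EH_CONTEXT note,
   the loop above emits, immediately before that insn, a move of the EH
   context value into the location recorded in the note.  The call that
   actually fetches the context (call_get_eh_context) is emitted only
   once, in front of the first such use; later uses just copy the same
   value.  */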
1684
1685 /* Scan the current insns and build a list of handler labels. The
1686 resulting list is placed in the global variable exception_handler_labels.
1687
1688 It is called after the last exception handling region is added to
1689 the current function (when the rtl is almost all built for the
1690 current function) and before the jump optimization pass. */
1691
1692 void
1693 find_exception_handler_labels ()
1694 {
1695 rtx insn;
1696 int max_labelno = max_label_num ();
1697 int min_labelno = get_first_label_num ();
1698 rtx *labels;
1699
1700 exception_handler_labels = NULL_RTX;
1701
1702 /* If we aren't doing exception handling, there isn't much to do. */
1703 if (! doing_eh (0))
1704 return;
1705
1706 /* Generate a handy reference to each label. */
1707
1708 /* We call xmalloc here instead of alloca; we did the latter in the past,
1709 but found that it can sometimes end up being asked to allocate space
1710 for more than 1 million labels. */
1711 labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
1712 bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
1713
1714 /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER. */
1715 labels -= min_labelno;
1716
1717 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1718 {
1719 if (GET_CODE (insn) == CODE_LABEL)
1720 if (CODE_LABEL_NUMBER (insn) >= min_labelno
1721 && CODE_LABEL_NUMBER (insn) < max_labelno)
1722 labels[CODE_LABEL_NUMBER (insn)] = insn;
1723 }
1724
1725 /* For each start of a region, add its label to the list. */
1726
1727 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1728 {
1729 if (GET_CODE (insn) == NOTE
1730 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1731 {
1732 rtx label = NULL_RTX;
1733
1734 if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
1735 && NOTE_BLOCK_NUMBER (insn) < max_labelno)
1736 {
1737 label = labels[NOTE_BLOCK_NUMBER (insn)];
1738
1739 if (label)
1740 exception_handler_labels
1741 = gen_rtx (EXPR_LIST, VOIDmode,
1742 label, exception_handler_labels);
1743 else
1744 warning ("didn't find handler for EH region %d",
1745 NOTE_BLOCK_NUMBER (insn));
1746 }
1747 else
1748 warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
1749 }
1750 }
1751
1752 free (labels + min_labelno);
1753 }
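
/* The list built above is a chain of EXPR_LIST nodes whose XEXP (x, 0)
   is one handler's CODE_LABEL.  A minimal sketch of walking it (this is
   exactly what check_exception_handler_labels and scan_region do):

        rtx x;
        for (x = exception_handler_labels; x; x = XEXP (x, 1))
          ...use XEXP (x, 0), the CODE_LABEL of one handler...
*/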
1754
1755 /* Perform sanity checking on the exception_handler_labels list.
1756
1757 Can be called after find_exception_handler_labels is called to
1758 build the list of exception handlers for the current function and
1759 before we finish processing the current function. */
1760
1761 void
1762 check_exception_handler_labels ()
1763 {
1764 rtx insn, handler;
1765
1766 /* If we aren't doing exception handling, there isn't much to check. */
1767 if (! doing_eh (0))
1768 return;
1769
1770 /* Ensure that each label recorded in exception_handler_labels really
1771 is the CODE_LABEL in the insn stream that carries that
1772 CODE_LABEL_NUMBER. */
1773
1774 for (handler = exception_handler_labels;
1775 handler;
1776 handler = XEXP (handler, 1))
1777 {
1778 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1779 {
1780 if (GET_CODE (insn) == CODE_LABEL)
1781 {
1782 if (CODE_LABEL_NUMBER (insn)
1783 == CODE_LABEL_NUMBER (XEXP (handler, 0)))
1784 {
1785 if (insn != XEXP (handler, 0))
1786 warning ("mismatched handler %d",
1787 CODE_LABEL_NUMBER (insn));
1788 break;
1789 }
1790 }
1791 }
1792 if (insn == NULL_RTX)
1793 warning ("handler not found %d",
1794 CODE_LABEL_NUMBER (XEXP (handler, 0)));
1795 }
1796
1797 /* Now go through and make sure that for each region there is a
1798 corresponding label. */
1799 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1800 {
1801 if (GET_CODE (insn) == NOTE
1802 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1803 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1804 {
1805 for (handler = exception_handler_labels;
1806 handler;
1807 handler = XEXP (handler, 1))
1808 {
1809 if (CODE_LABEL_NUMBER (XEXP (handler, 0))
1810 == NOTE_BLOCK_NUMBER (insn))
1811 break;
1812 }
1813 if (handler == NULL_RTX)
1814 warning ("region exists, no handler %d",
1815 NOTE_BLOCK_NUMBER (insn));
1816 }
1817 }
1818 }
1819 \f
1820 /* This group of functions initializes the exception handling data
1821 structures at the start of the compilation, initializes the data
1822 structures at the start of a function, and saves and restores the
1823 exception handling data structures for the start/end of a nested
1824 function. */
1825
1826 /* Toplevel initialization for EH things. */
1827
1828 void
1829 init_eh ()
1830 {
1831 /* Generate rtl to reference the variable in which the PC of the
1832 current context is saved. */
1833 tree type = build_pointer_type (make_node (VOID_TYPE));
1834 }
1835
1836 /* Initialize the per-function EH information. */
1837
1838 void
1839 init_eh_for_function ()
1840 {
1841 ehstack.top = 0;
1842 ehqueue.head = ehqueue.tail = 0;
1843 catch_clauses = NULL_RTX;
1844 false_label_stack = 0;
1845 caught_return_label_stack = 0;
1846 protect_list = NULL_TREE;
1847 current_function_ehc = NULL_RTX;
1848 }
1849
1850 /* Save some of the per-function EH info into the save area denoted by
1851 P.
1852
1853 This is currently called from save_stmt_status. */
1854
1855 void
1856 save_eh_status (p)
1857 struct function *p;
1858 {
1859 if (p == NULL)
1860 abort ();
1861
1862 p->ehstack = ehstack;
1863 p->ehqueue = ehqueue;
1864 p->catch_clauses = catch_clauses;
1865 p->false_label_stack = false_label_stack;
1866 p->caught_return_label_stack = caught_return_label_stack;
1867 p->protect_list = protect_list;
1868 p->ehc = current_function_ehc;
1869
1870 init_eh ();
1871 }
1872
1873 /* Restore the per-function EH info saved into the area denoted by P.
1874
1875 This is currently called from restore_stmt_status. */
1876
1877 void
1878 restore_eh_status (p)
1879 struct function *p;
1880 {
1881 if (p == NULL)
1882 abort ();
1883
1884 protect_list = p->protect_list;
1885 caught_return_label_stack = p->caught_return_label_stack;
1886 false_label_stack = p->false_label_stack;
1887 catch_clauses = p->catch_clauses;
1888 ehqueue = p->ehqueue;
1889 ehstack = p->ehstack;
1890 current_function_ehc = p->ehc;
1891 }
1892 \f
1893 /* This section is for the exception handling specific optimization
1894 pass. First are the internal routines, and then the main
1895 optimization pass. */
1896
1897 /* Determine if the given INSN can throw an exception. */
1898
1899 static int
1900 can_throw (insn)
1901 rtx insn;
1902 {
1903 /* Calls can always potentially throw exceptions. */
1904 if (GET_CODE (insn) == CALL_INSN)
1905 return 1;
1906
1907 if (asynchronous_exceptions)
1908 {
1909 /* If we wanted asynchronous exceptions, then everything but NOTEs
1910 and CODE_LABELs could throw. */
1911 if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
1912 return 1;
1913 }
1914
1915 return 0;
1916 }
1917
1918 /* Scan an exception region looking for the matching end and then
1919 remove it if possible. INSN is the start of the region, N is the
1920 region number, and DELETE_OUTER is used to record whether anything
1921 in this region can throw.
1922
1923 Regions are removed if they cannot possibly catch an exception.
1924 This is determined by invoking can_throw on each insn within the
1925 region; if can_throw returns true for any of the instructions, the
1926 region can catch an exception, since there is an insn within the
1927 region that is capable of throwing an exception.
1928
1929 Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
1930 calls abort if it can't find one.
1931
1932 Can abort if INSN is not a NOTE_INSN_EH_REGION_BEG, or if N doesn't
1933 correspond to the region number, or if DELETE_OUTER is NULL. */
1934
1935 static rtx
1936 scan_region (insn, n, delete_outer)
1937 rtx insn;
1938 int n;
1939 int *delete_outer;
1940 {
1941 rtx start = insn;
1942
1943 /* Assume we can delete the region. */
1944 int delete = 1;
1945
1946 if (insn == NULL_RTX
1947 || GET_CODE (insn) != NOTE
1948 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
1949 || NOTE_BLOCK_NUMBER (insn) != n
1950 || delete_outer == NULL)
1951 abort ();
1952
1953 insn = NEXT_INSN (insn);
1954
1955 /* Look for the matching end. */
1956 while (! (GET_CODE (insn) == NOTE
1957 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1958 {
1959 /* If anything can throw, we can't remove the region. */
1960 if (delete && can_throw (insn))
1961 {
1962 delete = 0;
1963 }
1964
1965 /* Watch out for and handle nested regions. */
1966 if (GET_CODE (insn) == NOTE
1967 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1968 {
1969 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
1970 }
1971
1972 insn = NEXT_INSN (insn);
1973 }
1974
1975 /* The _BEG/_END NOTEs must match and nest. */
1976 if (NOTE_BLOCK_NUMBER (insn) != n)
1977 abort ();
1978
1979 /* If anything in this region can throw, the enclosing region can too. */
1980 if (! delete)
1981 *delete_outer = 0;
1982 else
1983 {
1984 /* Delete the start and end of the region. */
1985 delete_insn (start);
1986 delete_insn (insn);
1987
1988 /* Only do this part if we have built the exception handler
1989 labels. */
1990 if (exception_handler_labels)
1991 {
1992 rtx x, *prev = &exception_handler_labels;
1993
1994 /* Find it in the list of handlers. */
1995 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1996 {
1997 rtx label = XEXP (x, 0);
1998 if (CODE_LABEL_NUMBER (label) == n)
1999 {
2000 /* If this was the last reference to the handler,
2001 delete it. */
2002 if (--LABEL_NUSES (label) == 0)
2003 delete_insn (label);
2004
2005 if (optimize)
2006 {
2007 /* Remove it from the list of exception handler
2008 labels, if we are optimizing. If we are not, then
2009 leave it in the list, as we are not really going to
2010 remove the region. */
2011 *prev = XEXP (x, 1);
2012 XEXP (x, 1) = 0;
2013 XEXP (x, 0) = 0;
2014 }
2015
2016 break;
2017 }
2018 prev = &XEXP (x, 1);
2019 }
2020 }
2021 }
2022 return insn;
2023 }
2024
2025 /* Perform various interesting optimizations for exception handling
2026 code.
2027
2028 We look for empty exception regions and delete them. The
2029 jump optimization code will remove the handler if nothing else uses
2030 it. */
2031
2032 void
2033 exception_optimize ()
2034 {
2035 rtx insn, regions = NULL_RTX;
2036 int n;
2037
2038 /* Remove empty regions. */
2039 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2040 {
2041 if (GET_CODE (insn) == NOTE
2042 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2043 {
2044 /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2045 insn, we will indirectly skip through all the insns in
2046 between. We are also guaranteed that the value of insn
2047 returned will be valid, as otherwise scan_region would have
2048 aborted instead of returning. */
2049 insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2050 }
2051 }
2052 }
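
/* An example of the effect, schematically.  If a region's body contains
   no CALL_INSNs (and asynchronous exceptions are off), nothing in it can
   throw, so

        NOTE_INSN_EH_REGION_BEG 4
        (insns that cannot throw)
        NOTE_INSN_EH_REGION_END 4

   has both notes deleted by scan_region.  When optimizing, label 4 is
   also dropped from exception_handler_labels, and if that was its last
   use the handler's CODE_LABEL is deleted so that jump optimization can
   remove the now-unused handler body.  */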
2053 \f
2054 /* Various hooks for the DWARF 2 __throw routine. */
2055
2056 /* Do any necessary initialization to access arbitrary stack frames.
2057 On the SPARC, this means flushing the register windows. */
2058
2059 void
2060 expand_builtin_unwind_init ()
2061 {
2062 /* Set this so all the registers get saved in our frame; we need to be
2063 able to copy the saved values for any registers from frames we unwind. */
2064 current_function_has_nonlocal_label = 1;
2065
2066 #ifdef SETUP_FRAME_ADDRESSES
2067 SETUP_FRAME_ADDRESSES ();
2068 #endif
2069 }
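
/* A sketch of the intended use from the runtime's side, assuming this
   hook is reachable as the builtin __builtin_unwind_init (the name is
   an assumption here; the stack-walking body is elided).  */
#if 0
void example_frame_walker (void)        /* hypothetical */
{
  /* Force all call-saved registers out to this frame so that the
     values our callers saved can be located and copied.  */
  __builtin_unwind_init ();

  /* ... walk stack frames and inspect saved-register slots here ... */
}
#endif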
2070
2071 /* Given a value extracted from the return address register or stack slot,
2072 return the actual address encoded in that value. */
2073
2074 rtx
2075 expand_builtin_extract_return_addr (addr_tree)
2076 tree addr_tree;
2077 {
2078 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2079 return eh_outer_context (addr);
2080 }
2081
2082 /* Given an actual address in addr_tree, do any necessary encoding
2083 and return the value to be stored in the return address register or
2084 stack slot so the epilogue will return to that address. */
2085
2086 rtx
2087 expand_builtin_frob_return_addr (addr_tree)
2088 tree addr_tree;
2089 {
2090 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2091 #ifdef RETURN_ADDR_OFFSET
2092 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2093 #endif
2094 return addr;
2095 }
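
/* The two builtins above are meant to be inverses of one another:
   expand_builtin_extract_return_addr maps a value as found in the
   return-address slot to the real return address (eh_outer_context
   does the target-specific decoding), while
   expand_builtin_frob_return_addr re-encodes an address for storing
   back, e.g. by undoing RETURN_ADDR_OFFSET as shown above.  */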
2096
2097 /* Given an actual address in addr_tree, set the return address register up
2098 so the epilogue will return to that address. If the return address is
2099 not in a register, do nothing. */
2100
2101 void
2102 expand_builtin_set_return_addr_reg (addr_tree)
2103 tree addr_tree;
2104 {
2105 rtx tmp;
2106 rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2107 0, hard_frame_pointer_rtx);
2108
2109 if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2110 return;
2111
2112 tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2113 if (tmp != ra)
2114 emit_move_insn (ra, tmp);
2115 }
2116
2117 /* Choose two registers for communication between the main body of
2118 __throw and the stub for adjusting the stack pointer. The first register
2119 is used to pass the address of the exception handler; the second register
2120 is used to pass the stack pointer offset.
2121
2122 For register 1 we use the return value register for a void *.
2123 For register 2 we use the static chain register if it exists and is
2124 different from register 1, otherwise some arbitrary call-clobbered
2125 register. */
2126
2127 static void
2128 eh_regs (r1, r2, outgoing)
2129 rtx *r1, *r2;
2130 int outgoing;
2131 {
2132 rtx reg1, reg2;
2133
2134 #ifdef FUNCTION_OUTGOING_VALUE
2135 if (outgoing)
2136 reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
2137 current_function_decl);
2138 else
2139 #endif
2140 reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2141 current_function_decl);
2142
2143 #ifdef STATIC_CHAIN_REGNUM
2144 if (outgoing)
2145 reg2 = static_chain_incoming_rtx;
2146 else
2147 reg2 = static_chain_rtx;
2148 if (REGNO (reg2) == REGNO (reg1))
2149 #endif /* STATIC_CHAIN_REGNUM */
2150 reg2 = NULL_RTX;
2151
2152 if (reg2 == NULL_RTX)
2153 {
2154 int i;
2155 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2156 if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
2157 {
2158 reg2 = gen_rtx (REG, Pmode, i);
2159 break;
2160 }
2161
2162 if (reg2 == NULL_RTX)
2163 abort ();
2164 }
2165
2166 *r1 = reg1;
2167 *r2 = reg2;
2168 }
2169
2170 /* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2171 to the exception handler. __throw will set up the necessary values
2172 and then return to the stub. */
2173
2174 rtx
2175 expand_builtin_eh_stub ()
2176 {
2177 rtx stub_start = gen_label_rtx ();
2178 rtx after_stub = gen_label_rtx ();
2179 rtx handler, offset, temp;
2180
2181 emit_jump (after_stub);
2182 emit_label (stub_start);
2183
2184 eh_regs (&handler, &offset, 0);
2185
2186 adjust_stack (offset);
2187 emit_indirect_jump (handler);
2188
2189 emit_label (after_stub);
2190 return gen_rtx (LABEL_REF, Pmode, stub_start);
2191 }
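
/* The sequence emitted above looks schematically like

        jump after_stub
   stub_start:
        <stack pointer> += <offset register>    ;; adjust_stack
        jump *<handler register>                ;; emit_indirect_jump
   after_stub:

   where the two registers are the pair chosen by eh_regs.  __throw
   loads them (see expand_builtin_set_eh_regs below) and then returns
   to stub_start, which adjusts the stack and enters the handler.  */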
2192
2193 /* Set up the registers for passing the handler address and stack offset
2194 to the stub above. */
2195
2196 void
2197 expand_builtin_set_eh_regs (handler, offset)
2198 tree handler, offset;
2199 {
2200 rtx reg1, reg2;
2201
2202 eh_regs (&reg1, &reg2, 1);
2203
2204 store_expr (offset, reg2, 0);
2205 store_expr (handler, reg1, 0);
2206
2207 /* These will be used by the stub. */
2208 emit_insn (gen_rtx (USE, VOIDmode, reg1));
2209 emit_insn (gen_rtx (USE, VOIDmode, reg2));
2210 }