source: trunk/third/gcc/except.c @ 11288

1/* Implements exception handling.
2   Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.
3   Contributed by Mike Stump <mrs@cygnus.com>.
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING.  If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA.  */
21
22
23/* An exception is an event that can be signaled from within a
24   function. This event can then be "caught" or "trapped" by the
25   callers of this function. This potentially allows program flow to
26   be transferred to any arbitrary code associated with a function call
27   several levels up the stack.
28
29   The intended use for this mechanism is for signaling "exceptional
30   events" in an out-of-band fashion, hence its name. The C++ language
31   (and many other OO-styled or functional languages) practically
32   requires such a mechanism, as otherwise it becomes very difficult
33   or even impossible to signal failure conditions in complex
34   situations.  The traditional C++ example is when an error occurs in
35   the process of constructing an object; without such a mechanism, it
36   is impossible to signal that the error occurs without adding global
37   state variables and error checks around every object construction.
38
39   The act of causing this event to occur is referred to as "throwing
40   an exception". (Alternate terms include "raising an exception" or
41   "signaling an exception".) The term "throw" is used because control
42   is returned to the callers of the function that is signaling the
43   exception, and thus there is the concept of "throwing" the
44   exception up the call stack.
45
46   There are two major codegen options for exception handling.  The
47   flag -fsjlj-exceptions can be used to select the setjmp/longjmp
48   approach, which is the default.  -fno-sjlj-exceptions can be used to
49   get the PC range table approach.  While this is a compile time
50   flag, an entire application must be compiled with the same codegen
51   option.  One is a PC range table approach, the other a
52   setjmp/longjmp based scheme.  We will first discuss the PC range
53   table approach; after that, we will discuss the setjmp/longjmp
54   based approach.
55
56   It is appropriate to speak of the "context of a throw". This
57   context refers to the address where the exception is thrown from,
58   and is used to determine which exception region will handle the
59   exception.
60
61   Regions of code within a function can be marked such that if one
62   of them contains the context of a throw, control will be passed to a
63   designated "exception handler". These areas are known as "exception
64   regions".  Exception regions cannot overlap, but they can be nested
65   to any arbitrary depth. Also, exception regions cannot cross
66   function boundaries.
67
68   Exception handlers can either be specified by the user (which we
69   will call a "user-defined handler") or generated by the compiler
70   (which we will designate as a "cleanup"). Cleanups are used to
71   perform tasks such as destruction of objects allocated on the
72   stack.
73
74   In the current implementation, cleanups are handled by allocating an
75   exception region for the area that the cleanup is designated for,
76   and the handler for the region performs the cleanup and then
77   rethrows the exception to the outer exception region. From the
78   standpoint of the current implementation, there is little
79   distinction made between a cleanup and a user-defined handler, and
80   the phrase "exception handler" can be used to refer to either one
81   equally well. (The section "Future Directions" below discusses how
82   this will change).
83
84   Each object file that is compiled with exception handling contains
85   a static array of exception handlers named __EXCEPTION_TABLE__.
86   Each entry contains the starting and ending addresses of the
87   exception region, and the address of the handler designated for
88   that region.
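
   As a rough sketch, each entry can be pictured as a structure of three
   pointers; the field names used here are illustrative only, and the
   real declaration lives in libgcc2.c:

        struct exception_table_entry
        {
          void *start;      first address covered by the region
          void *end;        first address past the end of the region
          void *handler;    address of the handler for the region
        };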
89
90   If the target does not use the DWARF 2 frame unwind information, at
91   program startup each object file invokes a function named
92   __register_exceptions with the address of its local
93   __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
94   is responsible for recording all of the exception regions into one list
95   (which is kept in a static variable named exception_table_list).
96
97   On targets that support crtstuff.c, the unwind information
98   is stored in a section named .eh_frame and the information for the
99   entire shared object or program is registered with a call to
100   __register_frame_info.  On other targets, the information for each
101   translation unit is registered from the file generated by collect2.
102   __register_frame_info is defined in frame.c, and is responsible for
103   recording all of the unwind regions into one list (which is kept in a
104   static variable named unwind_table_list).
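
   Schematically, that registration amounts to a constructor-time call of
   roughly the following shape (the section-begin symbol and the scratch
   object shown here are illustrative; the real code lives in crtstuff.c
   and in the file generated by collect2):

        static struct object object;
        __register_frame_info (__EH_FRAME_BEGIN__, &object);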
105
106   The function __throw is actually responsible for doing the
107   throw. On machines that have unwind info support, __throw is generated
108   by code in libgcc2.c; otherwise, __throw is generated on a
109   per-object-file basis for each source file compiled with
110   -fexceptions by the C++ frontend.  Before __throw is invoked,
111   the current context of the throw needs to be placed in the global
112   variable __eh_pc.
113
114   __throw attempts to find the appropriate exception handler for the
115   PC value stored in __eh_pc by calling __find_first_exception_table_match
116   (which is defined in libgcc2.c). If __find_first_exception_table_match
117   finds a relevant handler, __throw transfers control directly to it.
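
   In outline, the lookup step looks like this (a sketch; see libgcc2.c
   for the exact prototype):

        void *handler = __find_first_exception_table_match (__eh_pc);

   A null result means that no registered region covers __eh_pc.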
118
119   If a handler for the context being thrown from can't be found, __throw
120   walks the stack (see Walking the stack below) up the dynamic call chain to
121   continue searching for an appropriate exception handler, based upon the
122   caller of the function it last sought an exception handler for.  It stops
123   when either an exception handler is found or when the top of the
124   call chain is reached.
125
126   If no handler is found, an external library function named
127   __terminate is called.  If a handler is found, then we restart
128   our search for a handler at the end of the call chain, and repeat
129   the search process, but instead of just walking up the call chain,
130   we unwind the call chain as we walk up it.
131
132   Internal implementation details:
133
134   To associate a user-defined handler with a block of statements, the
135   function expand_start_try_stmts is used to mark the start of the
136   block of statements with which the handler is to be associated
137   (which is known as a "try block"). All statements that appear
138   afterwards will be associated with the try block.
139
140   A call to expand_start_all_catch marks the end of the try block,
141   and also marks the start of the "catch block" (the user-defined
142   handler) associated with the try block.
143
144   This user-defined handler will be invoked for *every* exception
145   thrown within the context of the try block.  It is up to the handler
146   to decide whether or not it wishes to handle any given exception,
147   as there is currently no mechanism in this implementation for making
148   that decision automatically.  (There are plans for conditionally
149   processing an exception based on its "type", which will provide a
150   language-independent mechanism.)
151
152   If the handler chooses not to process the exception (perhaps by
153   looking at an "exception type" or some other additional data
154   supplied with the exception), it can fall through to the end of the
155   handler. expand_end_all_catch and expand_leftover_cleanups
156   add additional code to the end of each handler to take care of
157   rethrowing to the outer exception handler.
158
159   The handler also has the option to continue with "normal flow of
160   code", or in other words to resume executing at the statement
161   immediately after the end of the exception region. The variable
162   caught_return_label_stack contains a stack of labels, and jumping
163   to the topmost entry's label via expand_goto will resume normal
164   flow to the statement immediately after the end of the exception
165   region. If the handler falls through to the end, the exception will
166   be rethrown to the outer exception region.
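
   For example, a frontend that wants a handler to resume normal
   execution can emit, as a sketch using the routines defined later in
   this file:

        expand_goto (top_label_entry (&caught_return_label_stack));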
167
168   The instructions for the catch block are kept as a separate
169   sequence, and will be emitted at the end of the function along with
170   the handlers specified via expand_eh_region_end. The end of the
171   catch block is marked with expand_end_all_catch.
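
   Putting the pieces together, a frontend drives this interface roughly
   as follows (expansion of the try body and of the handlers is elided):

        expand_start_try_stmts ();
          ... expand the statements of the try block ...
        expand_start_all_catch ();
          ... expand each user-defined handler ...
        expand_end_all_catch ();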
172
173   Any data associated with the exception must currently be handled by
174   some external mechanism maintained in the frontend.  For example,
175   the C++ exception mechanism passes an arbitrary value along with
176   the exception, and this is handled in the C++ frontend by using a
177   global variable to hold the value. (This will be changing in the
178   future.)
179
180   The mechanism in C++ for handling data associated with the
181   exception is clearly not thread-safe. For a thread-based
182   environment, another mechanism must be used (possibly using a
183   per-thread allocation mechanism if the size of the area that needs
184   to be allocated isn't known at compile time.)
185
186   Internally-generated exception regions (cleanups) are marked by
187   calling expand_eh_region_start to mark the start of the region,
188   and expand_eh_region_end (handler) is used to both designate the
189   end of the region and to associate a specified handler/cleanup with
190   the region. The rtl code in HANDLER will be invoked whenever an
191   exception occurs in the region between the calls to
192   expand_eh_region_start and expand_eh_region_end. After HANDLER is
193   executed, additional code is emitted to handle rethrowing the
194   exception to the outer exception handler. The code for HANDLER will
195   be emitted at the end of the function.
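
   From the caller's point of view a cleanup region therefore looks
   roughly like this, with HANDLER being the cleanup expressed as a tree:

        expand_eh_region_start ();
          ... emit the code to be protected ...
        expand_eh_region_end (handler);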
196
197   TARGET_EXPRs can also be used to designate exception regions. A
198   TARGET_EXPR gives an unwind-protect style interface commonly used
199   in functional languages such as LISP. The associated expression is
200   evaluated, and whether or not it (or any of the functions that it
201   calls) throws an exception, the protect expression is always
202   invoked. This implementation takes care of the details of
203   associating an exception table entry with the expression and
204   generating the necessary code (it actually emits the protect
205   expression twice, once for normal flow and once for the exception
206   case). As for the other handlers, the code for the exception case
207   will be emitted at the end of the function.
208
209   Cleanups can also be specified by using add_partial_entry (handler)
210   and end_protect_partials. add_partial_entry creates the start of
211   a new exception region; HANDLER will be invoked if an exception is
212   thrown within the context of the region between the calls to
213   add_partial_entry and end_protect_partials. end_protect_partials is
214   used to mark the end of these regions. add_partial_entry can be
215   called as many times as needed before calling end_protect_partials.
216   However, end_protect_partials should only be invoked once for each
217   group of calls to add_partial_entry as the entries are queued
218   and all of the outstanding entries are processed simultaneously
219   when end_protect_partials is invoked. Similarly to the other
220   handlers, the code for HANDLER will be emitted at the end of the
221   function.
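
   For example, where CLEANUP1 and CLEANUP2 stand for arbitrary cleanup
   trees:

        add_partial_entry (cleanup1);
          ... code protected by CLEANUP1 ...
        add_partial_entry (cleanup2);
          ... code protected by CLEANUP1 and CLEANUP2 ...
        end_protect_partials ();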
222
223   The generated RTL for an exception region includes
224   NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
225   the start and end of the exception region. A unique label is also
226   generated at the start of the exception region, which is available
227   by looking at the ehstack variable. The topmost entry corresponds
228   to the current region.
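
   Schematically, the RTL emitted for one region looks like:

        (note NOTE_INSN_EH_REGION_BEG n)
          ... insns of the protected region ...
        (note NOTE_INSN_EH_REGION_END n)

   where n is the code label number of the exception handler for the
   region.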
229
230   In the current implementation, an exception can only be thrown from
231   a function call (since the mechanism used to actually throw an
232   exception involves calling __throw).  If an exception region is
233   created but no function calls occur within that region, the region
234   can be safely optimized away (along with its exception handlers)
235   since no exceptions can ever be caught in that region.  This
236   optimization is performed unless -fasynchronous-exceptions is
237   given.  If the user wishes to throw from a signal handler, or other
238   asynchronous place, -fasynchronous-exceptions should be used when
239   compiling for maximally correct code, at the cost of additional
240   exception regions.  Using -fasynchronous-exceptions only produces
241   code that is reasonably safe in such situations, but a correct
242   program cannot rely upon this working.  It can be used in failsafe
243   code, where trying to continue on and proceeding with potentially
244   incorrect results is better than halting the program.
245
246
247   Walking the stack:
248
249   The stack is walked by starting with a pointer to the current
250   frame, and finding the pointer to the caller's frame.  The unwind info
251   tells __throw how to find it.
252
253   Unwinding the stack:
254
255   When we use the term unwinding the stack, we mean undoing the
256   effects of the function prologue in a controlled fashion so that we
257   still have the flow of control.  Otherwise, we could just return
258   (jump to the normal end of function epilogue).
259
260   This is done in __throw in libgcc2.c when we know that a handler exists
261   in a frame higher up the call stack than its immediate caller.
262
263   To unwind, we find the unwind data associated with the frame, if any.
264   If we don't find any, we call the library routine __terminate.  If we do
265   find it, we use the information to copy the saved register values from
266   that frame into the register save area in the frame for __throw, return
267   into a stub which updates the stack pointer, and jump to the handler.
268   The normal function epilogue for __throw handles restoring the saved
269   values into registers.
270
271   When unwinding, we use this method if we know it will
272   work (if DWARF2_UNWIND_INFO is defined).  Otherwise, we know that
273   an inline unwinder will have been emitted for any function that
274   __unwind_function cannot unwind.  The inline unwinder appears as a
275   normal exception handler covering the entire body of any function
276   that we know cannot be unwound by __unwind_function.  We inform the
277   compiler of whether a function can be unwound with
278   __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
279   when the unwinder isn't needed.  __unwind_function is used as an
280   action of last resort.  If no other method can be used for
281   unwinding, __unwind_function is used.  If it cannot unwind, it
282   should call __terminate.
283
284   By default, if the target-specific backend doesn't supply a definition
285   for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
286   unwinders will be used instead. The main tradeoff here is in text space
287   utilization.  Obviously, if inline unwinders have to be generated
288   repeatedly, this uses much more space than if a single routine is used.
289
290   However, it is simply not possible on some platforms to write a
291   generalized routine for doing stack unwinding without having some
292   form of additional data associated with each function.  The current
293   implementation can encode this data in the form of additional
294   machine instructions or as static data in tabular form.  The latter
295   is called the unwind data.
296
297   The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
298   or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
299   defined and has a non-zero value, a per-function unwinder is not emitted
300   for the current function.  If the static unwind data is supported, then
301   a per-function unwinder is not emitted.
302
303   On some platforms it is possible that neither __unwind_function
304   nor inlined unwinders are available. For these platforms it is not
305   possible to throw through a function call, and abort will be
306   invoked instead of performing the throw.
307
308   The reason the unwind data may be needed is that on some platforms
309   the order and types of data stored on the stack can vary depending
310   on the type of function, its arguments and returned values, and the
311   compilation options used (optimization versus non-optimization,
312   -fomit-frame-pointer, processor variations, etc).
313
314   Unfortunately, this also means that throwing through functions that
315   aren't compiled with exception handling support will still not be
316   possible on some platforms. This problem is currently being
317   investigated, but no solutions have been found that do not imply
318   some unacceptable performance penalties.
319
320   Future directions:
321
322   Currently __throw makes no differentiation between cleanups and
323   user-defined exception regions. While this makes the implementation
324   simple, it also implies that it is impossible to determine if a
325   user-defined exception handler exists for a given exception without
326   completely unwinding the stack in the process. This is undesirable
327   from the standpoint of debugging, as ideally it would be possible
328   to trap unhandled exceptions in the debugger before the process of
329   unwinding has even started.
330
331   This problem can be solved by marking user-defined handlers in a
332   special way (probably by adding additional bits to exception_table_list).
333   A two-pass scheme could then be used by __throw to iterate
334   through the table. The first pass would search for a relevant
335   user-defined handler for the current context of the throw, and if
336   one is found, the second pass would then invoke all needed cleanups
337   before jumping to the user-defined handler.
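
   In pseudocode, the proposed scheme would look roughly like this (a
   sketch of the proposal only, not of existing code):

        pass 1: walk the frames outward from the throw point, considering
                only entries marked as user-defined handlers; remember the
                first match, or call __terminate if there is none.
        pass 2: walk the frames again, running the cleanups of every
                region passed over, then jump to the handler remembered
                from pass 1.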
338
339   Many languages (including C++ and Ada) make execution of a
340   user-defined handler conditional on the "type" of the exception
341   thrown. (The type of the exception is actually the type of the data
342   that is thrown with the exception.) It will thus be necessary for
343   __throw to be able to determine if a given user-defined
344   exception handler will actually be executed, given the type of
345   exception.
346
347   One scheme is to add additional information to exception_table_list
348   as to the types of exceptions accepted by each handler. __throw
349   can do the type comparisons and then determine if the handler is
350   actually going to be executed.
351
352   There is currently no significant level of debugging support
353   available, other than to place a breakpoint on __throw. While
354   this is sufficient in most cases, it would be helpful to be able to
355   know where a given exception was going to be thrown to before it is
356   actually thrown, and to be able to choose between stopping before
357   every exception region (including cleanups), or just user-defined
358   exception regions. This should be possible to do in the two-pass
359   scheme by adding additional labels to __throw for appropriate
360   breakpoints, and additional debugger commands could be added to
361   query various state variables to determine what actions are to be
362   performed next.
363
364   Another major problem that is being worked on is the issue with stack
365   unwinding on various platforms. Currently the only platforms that have
366   support for the generation of a generic unwinder are the SPARC and MIPS.
367   All other ports require per-function unwinders, which produce large
368   amounts of code bloat.
369
370   For setjmp/longjmp based exception handling, much of the above
371   still applies, but there are some additional points.  This section
372   discusses them.
373
374   We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs.  We don't
375   optimize EH regions yet.  We don't have to worry about machine
376   specific issues with unwinding the stack, as we rely upon longjmp
377   for all the machine specific details.  There is no variable context
378   of a throw, just the one implied by the dynamic handler stack
379   pointed to by the dynamic handler chain.  There is no exception
380   table, and no calls to __register_exceptions.  __sjthrow is used
381   instead of __throw, and it works by using the dynamic handler
382   chain, and longjmp.  -fasynchronous-exceptions has no effect, as
383   the elimination of trivial exception regions is not yet performed.
384
385   A frontend can set protect_cleanup_actions_with_terminate when all
386   the cleanup actions should be protected with an EH region that
387   calls terminate when an unhandled exception is thrown.  C++ does
388   this, Ada does not.  */
389
390
391#include "config.h"
392#include "defaults.h"
393#include <stdio.h>
394#include "rtl.h"
395#include "tree.h"
396#include "flags.h"
397#include "except.h"
398#include "function.h"
399#include "insn-flags.h"
400#include "expr.h"
401#include "insn-codes.h"
402#include "regs.h"
403#include "hard-reg-set.h"
404#include "insn-config.h"
405#include "recog.h"
406#include "output.h"
407
408/* One to use the setjmp/longjmp method of generating code for exception
409   handling.  */
410
411int exceptions_via_longjmp = 2;
412
413/* One to enable asynchronous exception support.  */
414
415int asynchronous_exceptions = 0;
416
417/* One to protect cleanup actions with a handler that calls
418   __terminate, zero otherwise.  */
419
420int protect_cleanup_actions_with_terminate = 0;
421
422/* A list of labels used for exception handlers.  Created by
423   find_exception_handler_labels for the optimization passes.  */
424
425rtx exception_handler_labels;
426
427/* Nonzero means that __throw was invoked.
428
429   This is used by the C++ frontend to know if code needs to be emitted
430   for __throw or not.  */
431
432int throw_used;
433
434/* The dynamic handler chain.  Nonzero if the function has already
435   fetched a pointer to the dynamic handler chain for exception
436   handling.  */
437
438rtx current_function_dhc;
439
440/* The dynamic cleanup chain.  Nonzero if the function has already
441   fetched a pointer to the dynamic cleanup chain for exception
442   handling.  */
443
444rtx current_function_dcc;
445
446/* A stack used for keeping track of the currently active exception
447   handling region.  As each exception region is started, an entry
448   describing the region is pushed onto this stack.  The current
449   region can be found by looking at the top of the stack, and as we
450   exit regions, the corresponding entries are popped.
451
452   Entries cannot overlap; they can be nested. So there is only one
453   entry at most that corresponds to the current instruction, and that
454   is the entry on the top of the stack.  */
455
456static struct eh_stack ehstack;
457
458/* A queue used for tracking which exception regions have closed but
459   whose handlers have not yet been expanded. Regions are emitted in
460   groups in an attempt to improve paging performance.
461
462   As we exit a region, we enqueue a new entry. The entries are then
463   dequeued during expand_leftover_cleanups and expand_start_all_catch.
464
465   We should redo things so that we either take RTL for the handler,
466   or we expand the handler expressed as a tree immediately at region
467   end time.  */
468
469static struct eh_queue ehqueue;
470
471/* Insns for all of the exception handlers for the current function.
472   They are currently emitted by the frontend code.  */
473
474rtx catch_clauses;
475
476/* A TREE_CHAINed list of handlers for regions that are not yet
477   closed. The TREE_VALUE of each entry contains the handler for the
478   corresponding entry on the ehstack.  */
479
480static tree protect_list;
481
482/* Stacks to keep track of various labels.  */
483
484/* Keeps track of the label to resume to should one want to resume
485   normal control flow out of a handler (instead of, say, returning to
486   the caller of the current function or exiting the program).  */
487
488struct label_node *caught_return_label_stack = NULL;
489
490/* Keeps track of the label used as the context of a throw to rethrow an
491   exception to the outer exception region.  */
492
493struct label_node *outer_context_label_stack = NULL;
494
495/* A random data area for the front end's own use.  */
496
497struct label_node *false_label_stack = NULL;
498
499/* The rtx and the tree for the saved PC value.  */
500
501rtx eh_saved_pc_rtx;
502tree eh_saved_pc;
503
504rtx expand_builtin_return_addr  PROTO((enum built_in_function, int, rtx));
505
506/* Various support routines to manipulate the various data structures
507   used by the exception handling code.  */
508
509/* Push a label entry onto the given STACK.  */
510
511void
512push_label_entry (stack, rlabel, tlabel)
513     struct label_node **stack;
514     rtx rlabel;
515     tree tlabel;
516{
517  struct label_node *newnode
518    = (struct label_node *) xmalloc (sizeof (struct label_node));
519
520  if (rlabel)
521    newnode->u.rlabel = rlabel;
522  else
523    newnode->u.tlabel = tlabel;
524  newnode->chain = *stack;
525  *stack = newnode;
526}
527
528/* Pop a label entry from the given STACK.  */
529
530rtx
531pop_label_entry (stack)
532     struct label_node **stack;
533{
534  rtx label;
535  struct label_node *tempnode;
536
537  if (! *stack)
538    return NULL_RTX;
539
540  tempnode = *stack;
541  label = tempnode->u.rlabel;
542  *stack = (*stack)->chain;
543  free (tempnode);
544
545  return label;
546}
547
548/* Return the top element of the given STACK.  */
549
550tree
551top_label_entry (stack)
552     struct label_node **stack;
553{
554  if (! *stack)
555    return NULL_TREE;
556
557  return (*stack)->u.tlabel;
558}
559
560/* Make a copy of ENTRY using xmalloc to allocate the space.  */
561
562static struct eh_entry *
563copy_eh_entry (entry)
564     struct eh_entry *entry;
565{
566  struct eh_entry *newentry;
567
568  newentry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
569  bcopy ((char *) entry, (char *) newentry, sizeof (struct eh_entry));
570
571  return newentry;
572}
573
574/* Push a new eh_node entry onto STACK.  */
575
576static void
577push_eh_entry (stack)
578     struct eh_stack *stack;
579{
580  struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
581  struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
582
583  entry->outer_context = gen_label_rtx ();
584  entry->exception_handler_label = gen_label_rtx ();
585  entry->finalization = NULL_TREE;
586
587  node->entry = entry;
588  node->chain = stack->top;
589  stack->top = node;
590}
591
592/* Pop an entry from the given STACK.  */
593
594static struct eh_entry *
595pop_eh_entry (stack)
596     struct eh_stack *stack;
597{
598  struct eh_node *tempnode;
599  struct eh_entry *tempentry;
600 
601  tempnode = stack->top;
602  tempentry = tempnode->entry;
603  stack->top = stack->top->chain;
604  free (tempnode);
605
606  return tempentry;
607}
608
609/* Enqueue an ENTRY onto the given QUEUE.  */
610
611static void
612enqueue_eh_entry (queue, entry)
613     struct eh_queue *queue;
614     struct eh_entry *entry;
615{
616  struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
617
618  node->entry = entry;
619  node->chain = NULL;
620
621  if (queue->head == NULL)
622    {
623      queue->head = node;
624    }
625  else
626    {
627      queue->tail->chain = node;
628    }
629  queue->tail = node;
630}
631
632/* Dequeue an entry from the given QUEUE.  */
633
634static struct eh_entry *
635dequeue_eh_entry (queue)
636     struct eh_queue *queue;
637{
638  struct eh_node *tempnode;
639  struct eh_entry *tempentry;
640
641  if (queue->head == NULL)
642    return NULL;
643
644  tempnode = queue->head;
645  queue->head = queue->head->chain;
646
647  tempentry = tempnode->entry;
648  free (tempnode);
649
650  return tempentry;
651}
652
653/* Routine to see if exception handling is turned on.
654   DO_WARN is non-zero if we want to inform the user that exception
655   handling is turned off.
656
657   This is used to ensure that -fexceptions has been specified if the
658   compiler tries to use any exception-specific functions.  */
659
660int
661doing_eh (do_warn)
662     int do_warn;
663{
664  if (! flag_exceptions)
665    {
666      static int warned = 0;
667      if (! warned && do_warn)
668        {
669          error ("exception handling disabled, use -fexceptions to enable");
670          warned = 1;
671        }
672      return 0;
673    }
674  return 1;
675}
676
677/* Given a return address in ADDR, determine the address we should use
678   to find the corresponding EH region.  */
679
680rtx
681eh_outer_context (addr)
682     rtx addr;
683{
684  /* First mask out any unwanted bits.  */
685#ifdef MASK_RETURN_ADDR
686  expand_and (addr, MASK_RETURN_ADDR, addr);
687#endif
688
689  /* Then adjust to find the real return address.  */
690#if defined (RETURN_ADDR_OFFSET)
691  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
692#endif
693
694  return addr;
695}
696
697/* Start a new exception region for a region of code that has a
698   cleanup action and push the HANDLER for the region onto
699   protect_list. All of the regions created with add_partial_entry
700   will be ended when end_protect_partials is invoked.  */
701
702void
703add_partial_entry (handler)
704     tree handler;
705{
706  expand_eh_region_start ();
707
708  /* Make sure the entry is on the correct obstack.  */
709  push_obstacks_nochange ();
710  resume_temporary_allocation ();
711
712  /* Because this is a cleanup action, we may have to protect the handler
713     with __terminate.  */
714  handler = protect_with_terminate (handler);
715
716  protect_list = tree_cons (NULL_TREE, handler, protect_list);
717  pop_obstacks ();
718}
719
720/* Get a reference to the dynamic handler chain.  It points to the
721   pointer to the next element in the dynamic handler chain.  It ends
722   when there are no more elements in the dynamic handler chain, that
723   is, when the value is &top_elt from libgcc2.c.  Immediately after the
724   pointer is an area suitable for setjmp/longjmp when
725   DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
726   __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
727   isn't defined.
728
729   This routine is here to facilitate the porting of this code to
730   systems with threads.  One can either replace the routine we emit a
731   call for here in libgcc2.c, or one can modify this routine to work
732   with their thread system.
733
734   Ideally, we really only want one per real function, not one
735   per inlined function.  */
736
737rtx
738get_dynamic_handler_chain ()
739{
740  static tree fn;
741  tree expr;
742  rtx insns;
743
744  if (current_function_dhc)
745    return current_function_dhc;
746
747  if (fn == NULL_TREE)
748    {
749      tree fntype;
750      fn = get_identifier ("__get_dynamic_handler_chain");
751      push_obstacks_nochange ();
752      end_temporary_allocation ();
753      fntype = build_pointer_type (build_pointer_type
754                                   (build_pointer_type (void_type_node)));
755      fntype = build_function_type (fntype, NULL_TREE);
756      fn = build_decl (FUNCTION_DECL, fn, fntype);
757      DECL_EXTERNAL (fn) = 1;
758      TREE_PUBLIC (fn) = 1;
759      DECL_ARTIFICIAL (fn) = 1;
760      TREE_READONLY (fn) = 1;
761      make_decl_rtl (fn, NULL_PTR, 1);
762      assemble_external (fn);
763      pop_obstacks ();
764    }
765
766  expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
767  expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
768                expr, NULL_TREE, NULL_TREE);
769  TREE_SIDE_EFFECTS (expr) = 1;
770  expr = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (expr)), expr);
771
772  start_sequence ();
773  current_function_dhc = expand_expr (expr, NULL_RTX, VOIDmode, 0);
774  insns = get_insns ();
775  end_sequence ();
776  emit_insns_before (insns, get_first_nonparm_insn ());
777
778  return current_function_dhc;
779}
780
781/* Get a reference to the dynamic cleanup chain.  It points to the
782   pointer to the next element in the dynamic cleanup chain.
783   Immediately after the pointer are two Pmode variables, one for a
784   pointer to a function that performs the cleanup action, and the
785   second, the argument to pass to that function.  */
786
787rtx
788get_dynamic_cleanup_chain ()
789{
790  rtx dhc, dcc;
791
792  dhc = get_dynamic_handler_chain ();
793  dcc = plus_constant (dhc, GET_MODE_SIZE (Pmode));
794
795  current_function_dcc = copy_to_reg (dcc);
796
797  /* We don't want a copy of the dcc, but rather, the single dcc.  */
798  return gen_rtx (MEM, Pmode, current_function_dcc);
799}
800
801/* Generate code to evaluate X and jump to LABEL if the value is nonzero.
802   LABEL is an rtx of code CODE_LABEL, in this function.  */
803
804void
805jumpif_rtx (x, label)
806     rtx x;
807     rtx label;
808{
809  jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
810}
811
812/* Generate code to evaluate X and jump to LABEL if the value is zero.
813   LABEL is an rtx of code CODE_LABEL, in this function.  */
814
815void
816jumpifnot_rtx (x, label)
817     rtx x;
818     rtx label;
819{
820  jumpifnot (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
821}
822
823/* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
824   We just need to create an element for the cleanup list, and push it
825   into the chain.
826
827   A dynamic cleanup is a cleanup action implied by the presence of an
828   element on the EH runtime dynamic cleanup stack that is to be
829   performed when an exception is thrown.  The cleanup action is
830   performed by __sjthrow when an exception is thrown.  Only certain
831   actions can be optimized into dynamic cleanup actions.  For the
832   restrictions on what actions can be performed using this routine,
833   see expand_eh_region_start_tree.  */
834
835static void
836start_dynamic_cleanup (func, arg)
837     tree func;
838     tree arg;
839{
840  rtx dhc, dcc;
841  rtx new_func, new_arg;
842  rtx x, buf;
843  int size;
844
845  /* We allocate enough room for a pointer to the function, and
846     one argument.  */
847  size = 2;
848
849  /* XXX, FIXME: The stack space allocated this way is too long lived,
850     but there is no allocation routine that allocates at the level of
851     the last binding contour.  */
852  buf = assign_stack_local (BLKmode,
853                            GET_MODE_SIZE (Pmode)*(size+1),
854                            0);
855
856  buf = change_address (buf, Pmode, NULL_RTX);
857
858  /* Store dcc into the first word of the newly allocated buffer.  */
859
860  dcc = get_dynamic_cleanup_chain ();
861  emit_move_insn (buf, dcc);
862
863  /* Store func and arg into the cleanup list element.  */
864
865  new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
866                                                 GET_MODE_SIZE (Pmode)));
867  new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
868                                                GET_MODE_SIZE (Pmode)*2));
869  x = expand_expr (func, new_func, Pmode, 0);
870  if (x != new_func)
871    emit_move_insn (new_func, x);
872
873  x = expand_expr (arg, new_arg, Pmode, 0);
874  if (x != new_arg)
875    emit_move_insn (new_arg, x);
876
877  /* Update the cleanup chain.  */
878
879  emit_move_insn (dcc, XEXP (buf, 0));
880}
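
/* Purely illustrative sketch, not a declaration this file uses: the
   element built by start_dynamic_cleanup above can be pictured as the
   following C structure.  The struct and field names are hypothetical;
   the code manipulates raw Pmode-sized words instead.  */

struct illustrative_dynamic_cleanup
{
  void **chain;                  /* Previous head of the dynamic cleanup chain.  */
  void (*func) PROTO((void *));  /* Cleanup function __sjthrow will call.  */
  void *arg;                     /* Argument passed to FUNC.  */
};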
881
882/* Emit RTL to start a dynamic handler on the EH runtime dynamic
883   handler stack.  This should only be used by expand_eh_region_start
884   or expand_eh_region_start_tree.  */
885
886static void
887start_dynamic_handler ()
888{
889  rtx dhc, dcc;
890  rtx x, arg, buf;
891  int size;
892
893#ifndef DONT_USE_BUILTIN_SETJMP
894  /* The number of Pmode words for the setjmp buffer, when using the
895     builtin setjmp/longjmp; see expand_builtin, case
896     BUILT_IN_LONGJMP.  */
897  size = 5;
898#else
899#ifdef JMP_BUF_SIZE
900  size = JMP_BUF_SIZE;
901#else
902  /* Should be large enough for most systems; if it is not,
903     JMP_BUF_SIZE should be defined with the proper value.  It will
904     also tend to be larger than necessary for most systems; a more
905     optimal port will define JMP_BUF_SIZE.  */
906  size = FIRST_PSEUDO_REGISTER+2;
907#endif
908#endif
909  /* XXX, FIXME: The stack space allocated this way is too long lived,
910     but there is no allocation routine that allocates at the level of
911     the last binding contour.  */
912  arg = assign_stack_local (BLKmode,
913                            GET_MODE_SIZE (Pmode)*(size+1),
914                            0);
915
916  arg = change_address (arg, Pmode, NULL_RTX);
917
918  /* Store dhc into the first word of the newly allocated buffer.  */
919
920  dhc = get_dynamic_handler_chain ();
921  dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
922                                            GET_MODE_SIZE (Pmode)));
923  emit_move_insn (arg, dhc);
924
925  /* Zero out the start of the cleanup chain.  */
926  emit_move_insn (dcc, const0_rtx);
927
928  /* The jmpbuf starts two words into the area allocated.  */
929  buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
930
931#ifdef DONT_USE_BUILTIN_SETJMP
932  x = emit_library_call_value (setjmp_libfunc, NULL_RTX, 1, SImode, 1,
933                               buf, Pmode);
934#else
935  x = expand_builtin_setjmp (buf, NULL_RTX);
936#endif
937
938  /* If we come back here for a catch, transfer control to the
939     handler.  */
940
941  jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
942
943  /* We are committed to this, so update the handler chain.  */
944
945  emit_move_insn (dhc, XEXP (arg, 0));
946}
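
/* Likewise purely illustrative (hypothetical names; the real layout is
   defined only by the word offsets used above): the dynamic handler
   element built by start_dynamic_handler can be pictured as follows.  */

struct illustrative_dynamic_handler
{
  void **chain;        /* Previous head of the dynamic handler chain.  */
  void **cleanups;     /* Head of this region's dynamic cleanup chain.  */
  void *jmpbuf[1];     /* Really SIZE words, used by setjmp or __builtin_setjmp.  */
};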
947
948/* Start an exception handling region for the given cleanup action.
949   All instructions emitted after this point are considered to be part
950   of the region until expand_eh_region_end is invoked.  CLEANUP is
951   the cleanup action to perform.  The return value is true if the
952   exception region was optimized away.  If that case,
953   expand_eh_region_end does not need to be called for this cleanup,
954   nor should it be.
955
956   This routine notices one particular common case in C++ code
957   generation, and optimizes it so as to not need the exception
958   region.  It works by creating a dynamic cleanup action, instead of
959   of a using an exception region.  */
960
961int
962expand_eh_region_start_tree (decl, cleanup)
963     tree decl;
964     tree cleanup;
965{
966  rtx note;
967
968  /* This is the old code.  */
969  if (! doing_eh (0))
970    return 0;
971
972  /* The optimization only applies to actions protected with
973     terminate, and only applies if we are using the setjmp/longjmp
974     codegen method.  */
975  if (exceptions_via_longjmp
976      && protect_cleanup_actions_with_terminate)
977    {
978      tree func, arg;
979      tree args;
980
981      /* Ignore any UNSAVE_EXPR.  */
982      if (TREE_CODE (cleanup) == UNSAVE_EXPR)
983        cleanup = TREE_OPERAND (cleanup, 0);
984     
985      /* Further, it only applies if the action is a call, if there
986         are 2 arguments, and if the second argument is 2.  */
987
988      if (TREE_CODE (cleanup) == CALL_EXPR
989          && (args = TREE_OPERAND (cleanup, 1))
990          && (func = TREE_OPERAND (cleanup, 0))
991          && (arg = TREE_VALUE (args))
992          && (args = TREE_CHAIN (args))
993
994          /* is the second argument 2?  */
995          && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
996          && TREE_INT_CST_LOW (TREE_VALUE (args)) == 2
997          && TREE_INT_CST_HIGH (TREE_VALUE (args)) == 0
998
999          /* Make sure there are no other arguments.  */
1000          && TREE_CHAIN (args) == NULL_TREE)
1001        {
1002          /* Arrange for returns and gotos to pop the entry we make on the
1003             dynamic cleanup stack.  */
1004          expand_dcc_cleanup (decl);
1005          start_dynamic_cleanup (func, arg);
1006          return 1;
1007        }
1008    }
1009
1010  expand_eh_region_start_for_decl (decl);
1011  ehstack.top->entry->finalization = cleanup;
1012
1013  return 0;
1014}
1015
1016/* Just like expand_eh_region_start, except if a cleanup action is
1017   entered on the cleanup chain, the TREE_PURPOSE of the element put
1018   on the chain is DECL.  DECL should be the associated VAR_DECL, if
1019   any, otherwise it should be NULL_TREE.  */
1020
1021void
1022expand_eh_region_start_for_decl (decl)
1023     tree decl;
1024{
1025  rtx note;
1026
1027  /* This is the old code.  */
1028  if (! doing_eh (0))
1029    return;
1030
1031  if (exceptions_via_longjmp)
1032    {
1033      /* We need a new block to record the start and end of the
1034         dynamic handler chain.  We could always do this, but we
1035         really want to permit jumping into such a block, and we want
1036         to avoid any errors or performance impact in the SJ EH code
1037         for now.  */
1038      expand_start_bindings (0);
1039
1040      /* But we don't need or want a new temporary level.  */
1041      pop_temp_slots ();
1042
1043      /* Mark this block as created by expand_eh_region_start.  This
1044         is so that we can pop the block with expand_end_bindings
1045         automatically.  */
1046      mark_block_as_eh_region ();
1047
1048      /* Arrange for returns and gotos to pop the entry we make on the
1049         dynamic handler stack.  */
1050      expand_dhc_cleanup (decl);
1051    }
1052
1053  if (exceptions_via_longjmp == 0)
1054    note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
1055  push_eh_entry (&ehstack);
1056  if (exceptions_via_longjmp == 0)
1057    NOTE_BLOCK_NUMBER (note)
1058      = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
1059  if (exceptions_via_longjmp)
1060    start_dynamic_handler ();
1061}
1062
1063/* Start an exception handling region.  All instructions emitted after
1064   this point are considered to be part of the region until
1065   expand_eh_region_end is invoked.  */
1066
1067void
1068expand_eh_region_start ()
1069{
1070  expand_eh_region_start_for_decl (NULL_TREE);
1071}
1072
1073/* End an exception handling region.  The information about the region
1074   is found on the top of ehstack.
1075
1076   HANDLER is either the cleanup for the exception region, or if we're
1077   marking the end of a try block, HANDLER is integer_zero_node.
1078
1079   HANDLER will be transformed to rtl when expand_leftover_cleanups
1080   is invoked.  */
1081
1082void
1083expand_eh_region_end (handler)
1084     tree handler;
1085{
1086  struct eh_entry *entry;
1087
1088  if (! doing_eh (0))
1089    return;
1090
1091  entry = pop_eh_entry (&ehstack);
1092
1093  if (exceptions_via_longjmp == 0)
1094    {
1095      rtx label;
1096      rtx note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
1097      NOTE_BLOCK_NUMBER (note) = CODE_LABEL_NUMBER (entry->exception_handler_label);
1098
1099      label = gen_label_rtx ();
1100      emit_jump (label);
1101
1102      /* Emit a label marking the end of this exception region that
1103         is used for rethrowing into the outer context.  */
1104      emit_label (entry->outer_context);
1105
1106      /* Put in something that takes up space, as otherwise the end
1107         address for this EH region could have the exact same address as
1108         its outer region. This would cause us to miss the fact that
1109         resuming exception handling with this PC value would be inside
1110         the outer region.  */
1111      emit_insn (gen_nop ());
1112      emit_barrier ();
1113      emit_label (label);
1114    }
1115
1116  entry->finalization = handler;
1117
1118  enqueue_eh_entry (&ehqueue, entry);
1119
1120  /* If we have already started ending the bindings, don't recurse.
1121     This only happens when exceptions_via_longjmp is true.  */
1122  if (is_eh_region ())
1123    {
1124      /* Because we don't need or want a new temporary level and
1125         because we didn't create one in expand_eh_region_start,
1126         create a fake one now to avoid removing one in
1127         expand_end_bindings.  */
1128      push_temp_slots ();
1129
1130      mark_block_as_not_eh_region ();
1131
1132      /* Maybe do this to prevent jumping in and so on...  */
1133      expand_end_bindings (NULL_TREE, 0, 0);
1134    }
1135}
1136
1137/* Start an EH region for a goto fixup.  We only need these regions in the region-based
1138   EH scheme.  */
1139
1140void
1141expand_fixup_region_start ()
1142{
1143  if (! doing_eh (0) || exceptions_via_longjmp)
1144    return;
1145
1146  expand_eh_region_start ();
1147}
1148
1149/* End the EH region for a goto fixup.  CLEANUP is the cleanup we just
1150   expanded; to avoid running it twice if it throws, we look through the
1151   ehqueue for a matching region and rethrow from its outer_context.  */
1152
1153void
1154expand_fixup_region_end (cleanup)
1155     tree cleanup;
1156{
1157  tree t;
1158  struct eh_node *node;
1159  int yes;
1160
1161  if (! doing_eh (0) || exceptions_via_longjmp)
1162    return;
1163
1164  for (node = ehstack.top; node && node->entry->finalization != cleanup; )
1165    node = node->chain;
1166  if (node == 0)
1167    for (node = ehqueue.head; node && node->entry->finalization != cleanup; )
1168      node = node->chain;
1169  if (node == 0)
1170    abort ();
1171
1172  yes = suspend_momentary ();
1173
1174  t = build (RTL_EXPR, void_type_node, NULL_RTX, const0_rtx);
1175  TREE_SIDE_EFFECTS (t) = 1;
1176  do_pending_stack_adjust ();
1177  start_sequence_for_rtl_expr (t);
1178  expand_internal_throw (node->entry->outer_context);
1179  do_pending_stack_adjust ();
1180  RTL_EXPR_SEQUENCE (t) = get_insns ();
1181  end_sequence ();
1182
1183  resume_momentary (yes);
1184
1185  expand_eh_region_end (t);
1186}
1187
1188/* If we are using the setjmp/longjmp EH codegen method, we emit a
1189   call to __sjthrow.
1190
1191   Otherwise, we emit a call to __throw and note that we threw
1192   something, so we know we need to generate the necessary code for
1193   __throw.
1194
1195   Before invoking throw, the __eh_pc variable must have been set up
1196   to contain the PC being thrown from. This address is used by
1197   __throw to determine which exception region (if any) is
1198   responsible for handling the exception.  */
1199
1200void
1201emit_throw ()
1202{
1203  if (exceptions_via_longjmp)
1204    {
1205      emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
1206    }
1207  else
1208    {
1209#ifdef JUMP_TO_THROW
1210      emit_indirect_jump (throw_libfunc);
1211#else
1212#ifndef DWARF2_UNWIND_INFO
1213      /* Prevent assemble_external from doing anything with this symbol.  */
1214      SYMBOL_REF_USED (throw_libfunc) = 1;
1215#endif
1216      emit_library_call (throw_libfunc, 0, VOIDmode, 0);
1217#endif
1218      throw_used = 1;
1219    }
1220  emit_barrier ();
1221}
1222
1223/* An internal throw with an indirect CONTEXT we want to throw from.
1224   CONTEXT evaluates to the context of the throw.  */
1225
1226static void
1227expand_internal_throw_indirect (context)
1228     rtx context;
1229{
1230  assemble_external (eh_saved_pc);
1231  emit_move_insn (eh_saved_pc_rtx, context);
1232  emit_throw ();
1233}
1234
1235/* An internal throw with a direct CONTEXT we want to throw from.
1236   CONTEXT must be a label; its address will be used as the context of
1237   the throw.  */
1238
1239void
1240expand_internal_throw (context)
1241     rtx context;
1242{
1243  expand_internal_throw_indirect (gen_rtx (LABEL_REF, Pmode, context));
1244}
1245
1246/* Called from expand_exception_blocks and expand_end_catch_block to
1247   emit any pending handlers/cleanups queued from expand_eh_region_end.  */
1248
1249void
1250expand_leftover_cleanups ()
1251{
1252  struct eh_entry *entry;
1253
1254  while ((entry = dequeue_eh_entry (&ehqueue)) != 0)
1255    {
1256      rtx prev;
1257
1258      /* A leftover try block. Shouldn't be one here.  */
1259      if (entry->finalization == integer_zero_node)
1260        abort ();
1261
1262      /* Output the label for the start of the exception handler.  */
1263      emit_label (entry->exception_handler_label);
1264
1265#ifdef HAVE_exception_receiver
1266      if (! exceptions_via_longjmp)
1267        if (HAVE_exception_receiver)
1268          emit_insn (gen_exception_receiver ());
1269#endif
1270
1271#ifdef HAVE_nonlocal_goto_receiver
1272      if (! exceptions_via_longjmp)
1273        if (HAVE_nonlocal_goto_receiver)
1274          emit_insn (gen_nonlocal_goto_receiver ());
1275#endif
1276
1277      /* And now generate the insns for the handler.  */
1278      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1279
1280      prev = get_last_insn ();
1281      if (prev == NULL || GET_CODE (prev) != BARRIER)
1282        {
1283          if (exceptions_via_longjmp)
1284            emit_throw ();
1285          else
1286            {
1287              /* The below can be optimized away, and we could just
1288                 fall into the next EH handler, if we are certain they
1289                 are nested.  */
1290              /* Emit code to throw to the outer context if we fall off
1291                 the end of the handler.  */
1292              expand_internal_throw (entry->outer_context);
1293            }
1294        }
1295
1296      do_pending_stack_adjust ();
1297      free (entry);
1298    }
1299}
1300
1301/* Called at the start of a block of try statements.  */
1302void
1303expand_start_try_stmts ()
1304{
1305  if (! doing_eh (1))
1306    return;
1307
1308  expand_eh_region_start ();
1309}
1310
1311/* Generate RTL for the start of a group of catch clauses.
1312
1313   It is responsible for starting a new instruction sequence for the
1314   instructions in the catch block, and expanding the handlers for the
1315   internally-generated exception regions nested within the try block
1316   corresponding to this catch block.  */
1317
1318void
1319expand_start_all_catch ()
1320{
1321  struct eh_entry *entry;
1322  tree label;
1323
1324  if (! doing_eh (1))
1325    return;
1326
1327  push_label_entry (&outer_context_label_stack,
1328                    ehstack.top->entry->outer_context, NULL_TREE);
1329
1330  /* End the try block.  */
1331  expand_eh_region_end (integer_zero_node);
1332
1333  emit_line_note (input_filename, lineno);
1334  label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1335
1336  /* The label for the exception handling block that we will save.
1337     This is Lresume in the documentation.  */
1338  expand_label (label);
1339 
1340  if (exceptions_via_longjmp == 0)
1341    {
1342      /* Put in something that takes up space, as otherwise the end
1343         address for the EH region could have the exact same address as
1344         the outer region, causing us to miss the fact that resuming
1345         exception handling with this PC value would be inside the outer
1346         region.  */
1347      emit_insn (gen_nop ());
1348    }
1349
1350  /* Push the label that points to where normal flow is resumed onto
1351     the top of the label stack.  */
1352  push_label_entry (&caught_return_label_stack, NULL_RTX, label);
1353
1354  /* Start a new sequence for all the catch blocks.  We will add this
1355     to the global sequence catch_clauses when we have completed all
1356     the handlers in this handler-seq.  */
1357  start_sequence ();
1358
1359  while (1)
1360    {
1361      rtx prev;
1362
1363      entry = dequeue_eh_entry (&ehqueue);
1364      /* Emit the label for the exception handler for this region, and
1365         expand the code for the handler.
1366
1367         Note that a catch region is handled as a side-effect here;
1368         for a try block, entry->finalization will contain
1369         integer_zero_node, so no code will be generated in the
1370         expand_expr call below. But, the label for the handler will
1371         still be emitted, so any code emitted after this point will
1372         end up being the handler.  */
1373      emit_label (entry->exception_handler_label);
1374
1375#ifdef HAVE_exception_receiver
1376      if (! exceptions_via_longjmp)
1377        if (HAVE_exception_receiver)
1378          emit_insn (gen_exception_receiver ());
1379#endif
1380
1381#ifdef HAVE_nonlocal_goto_receiver
1382      if (! exceptions_via_longjmp)
1383        if (HAVE_nonlocal_goto_receiver)
1384          emit_insn (gen_nonlocal_goto_receiver ());
1385#endif
1386
1387      /* When we get down to the matching entry for this try block, stop.  */
1388      if (entry->finalization == integer_zero_node)
1389        {
1390          /* Don't forget to free this entry.  */
1391          free (entry);
1392          break;
1393        }
1394
1395      /* And now generate the insns for the handler.  */
1396      expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
1397
1398      prev = get_last_insn ();
1399      if (prev == NULL || GET_CODE (prev) != BARRIER)
1400        {
1401          if (exceptions_via_longjmp)
1402            emit_throw ();
1403          else
1404            {
1405              /* Code to throw out to outer context when we fall off end
1406                 of the handler. We can't do this here for catch blocks,
1407                 so it's done in expand_end_all_catch instead.
1408
1409                 The below can be optimized away (and we could just fall
1410                 into the next EH handler) if we are certain they are
1411                 nested.  */
1412
1413              expand_internal_throw (entry->outer_context);
1414            }
1415        }
1416      do_pending_stack_adjust ();
1417      free (entry);
1418    }
1419}
1420
1421/* Finish up the catch block.  At this point all the insns for the
1422   catch clauses have already been generated, so we only have to add
1423   them to the catch_clauses list. We also want to make sure that if
1424   we fall off the end of the catch clauses, we rethrow to the
1425   outer EH region.  */
1426
1427void
1428expand_end_all_catch ()
1429{
1430  rtx new_catch_clause;
1431
1432  if (! doing_eh (1))
1433    return;
1434
1435  if (exceptions_via_longjmp)
1436    emit_throw ();
1437  else
1438    {
1439      /* Code to throw out to outer context, if we fall off end of catch
1440         handlers.  This is rethrow (Lresume, same id, same obj) in the
1441         documentation. We use Lresume because we know that it will throw
1442         to the correct context.
1443
1444         In other words, if the catch handler doesn't exit or return, we
1445         do a "throw" (using the address of Lresume as the point being
1446         thrown from) so that the outer EH region can then try to process
1447         the exception.  */
1448
1449      expand_internal_throw (outer_context_label_stack->u.rlabel);
1450    }
1451
1452  /* Now we have the complete catch sequence.  */
1453  new_catch_clause = get_insns ();
1454  end_sequence ();
1455 
1456  /* This level of catch blocks is done, so set up the successful
1457     catch jump label for the next layer of catch blocks.  */
1458  pop_label_entry (&caught_return_label_stack);
1459  pop_label_entry (&outer_context_label_stack);
1460
1461  /* Add the new sequence of catches to the main one for this function.  */
1462  push_to_sequence (catch_clauses);
1463  emit_insns (new_catch_clause);
1464  catch_clauses = get_insns ();
1465  end_sequence ();
1466 
1467  /* Here we fall through into the continuation code.  */
1468}
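
/* Illustration: under the setjmp/longjmp scheme, "rethrow to the outer EH
   region" when control falls off the end of the catch clauses amounts to
   popping back to the enclosing context and longjmp-ing again.  The
   standalone sketch below models that with an explicit stack of jmp_bufs;
   eh_ctx_stack, model_throw and the rest are invented names, not the real
   runtime's interface.  */
#if 0  /* standalone illustration -- compile separately */
#include <setjmp.h>
#include <stdio.h>
#include <stdlib.h>

/* Each protected region pushes a jmp_buf; "throwing" longjmps to the
   innermost one, and a handler that falls off its end rethrows to the
   next outer context.  */
static jmp_buf eh_ctx_stack[8];
static int eh_ctx_top = -1;

static void
model_throw (void)
{
  if (eh_ctx_top < 0)
    {
      fprintf (stderr, "terminate: no handler\n");
      exit (1);
    }
  longjmp (eh_ctx_stack[eh_ctx_top--], 1);
}

int
main (void)
{
  /* Outer region.  */
  if (setjmp (eh_ctx_stack[++eh_ctx_top]) != 0)
    {
      printf ("outer handler ran\n");
      return 0;
    }

  /* Inner region.  */
  if (setjmp (eh_ctx_stack[++eh_ctx_top]) != 0)
    {
      printf ("inner handler ran and fell off its end\n");
      model_throw ();   /* the rethrow emitted at the end of the catches */
    }

  model_throw ();       /* the original throw */
  return 1;             /* not reached */
}
#endif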
1469
1470/* End all the pending exception regions on protect_list. The handlers
1471   will be emitted when expand_leftover_cleanups is invoked.  */
1472
1473void
1474end_protect_partials ()
1475{
1476  while (protect_list)
1477    {
1478      expand_eh_region_end (TREE_VALUE (protect_list));
1479      protect_list = TREE_CHAIN (protect_list);
1480    }
1481}
1482
1483/* Arrange for __terminate to be called if there is an unhandled throw
1484   from within E.  */
1485
1486tree
1487protect_with_terminate (e)
1488     tree e;
1489{
1490  /* We only need to do this when using setjmp/longjmp EH and the
1491     language requires it, as otherwise we protect all of the handlers
1492     at once, if we need to.  */
1493  if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
1494    {
1495      tree handler, result;
1496
1497      /* All cleanups must be on the function_obstack.  */
1498      push_obstacks_nochange ();
1499      resume_temporary_allocation ();
1500
1501      handler = make_node (RTL_EXPR);
1502      TREE_TYPE (handler) = void_type_node;
1503      RTL_EXPR_RTL (handler) = const0_rtx;
1504      TREE_SIDE_EFFECTS (handler) = 1;
1505      start_sequence_for_rtl_expr (handler);
1506
1507      emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
1508      emit_barrier ();
1509
1510      RTL_EXPR_SEQUENCE (handler) = get_insns ();
1511      end_sequence ();
1512       
1513      result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
1514      TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
1515      TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
1516      TREE_READONLY (result) = TREE_READONLY (e);
1517
1518      pop_obstacks ();
1519
1520      e = result;
1521    }
1522
1523  return e;
1524}
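
/* Illustration of the TRY_CATCH_EXPR built above, at the semantic level:
   the cleanup E runs inside a region whose handler does nothing but call
   terminate.  The standalone sketch below models that with setjmp/longjmp;
   run_protected, model_terminate and friends are invented names standing
   in for the generated region and the terminate_libfunc call.  */
#if 0  /* standalone illustration -- compile separately */
#include <setjmp.h>
#include <stdio.h>
#include <stdlib.h>

static jmp_buf cleanup_ctx;

static void
model_terminate (void)
{
  fprintf (stderr, "__terminate called\n");
  exit (1);
}

/* A cleanup that itself throws.  */
static void
throwing_cleanup (void)
{
  longjmp (cleanup_ctx, 1);
}

/* Run CLEANUP inside its own EH context; if a throw escapes it, call
   terminate instead of propagating.  */
static void
run_protected (void (*cleanup) (void))
{
  if (setjmp (cleanup_ctx) != 0)
    model_terminate ();   /* handler: unconditional terminate */
  cleanup ();
}

int
main (void)
{
  run_protected (throwing_cleanup);
  return 0;               /* not reached */
}
#endif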
1525
1526/* The exception table that we build, used for looking up and
1527   dispatching exceptions; the current number of entries; and its
1528   maximum size before we have to extend it.
1529
1530   The number in eh_table is the code label number of the exception
1531   handler for the region. This is added by add_eh_table_entry and
1532   used by output_exception_table_entry.  */
1533
1534static int *eh_table;
1535static int eh_table_size;
1536static int eh_table_max_size;
1537
1538/* Note the need for an exception table entry for region N.  If we
1539   don't need to output an explicit exception table, avoid all of the
1540   extra work.
1541
1542   Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
1543   N is the NOTE_BLOCK_NUMBER of the note, which comes from the code
1544   label number of the exception handler for the region.  */
1545
1546void
1547add_eh_table_entry (n)
1548     int n;
1549{
1550#ifndef OMIT_EH_TABLE
1551  if (eh_table_size >= eh_table_max_size)
1552    {
1553      if (eh_table)
1554        {
1555          eh_table_max_size += eh_table_max_size>>1;
1556
1557          if (eh_table_max_size < 0)
1558            abort ();
1559
1560          eh_table = (int *) xrealloc (eh_table,
1561                                       eh_table_max_size * sizeof (int));
1562        }
1563      else
1564        {
1565          eh_table_max_size = 252;
1566          eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
1567        }
1568    }
1569  eh_table[eh_table_size++] = n;
1570#endif
1571}
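
/* For reference, the growth policy above on its own: start with room for
   252 entries and grow the vector by half its current size whenever it
   fills up.  The standalone sketch below uses plain malloc/realloc and an
   invented push_entry helper; it is only a model of the policy, not the
   compiler's allocator.  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>
#include <stdlib.h>

static int *table;
static int table_size, table_max_size;

static void
push_entry (int n)
{
  if (table_size >= table_max_size)
    {
      if (table)
        {
          table_max_size += table_max_size >> 1;
          table = (int *) realloc (table, table_max_size * sizeof (int));
        }
      else
        {
          table_max_size = 252;
          table = (int *) malloc (table_max_size * sizeof (int));
        }
      if (table == NULL)
        abort ();
    }
  table[table_size++] = n;
}

int
main (void)
{
  int i;
  for (i = 0; i < 1000; i++)
    push_entry (i);
  /* Capacity grows 252 -> 378 -> 567 -> 850 -> 1275.  */
  printf ("%d entries, capacity %d\n", table_size, table_max_size);
  free (table);
  return 0;
}
#endif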
1572
1573/* Return a non-zero value if we need to output an exception table.
1574
1575   On some platforms, we don't have to output a table explicitly.
1576   That does not mean we don't have one.  */
1577
1578int
1579exception_table_p ()
1580{
1581  if (eh_table)
1582    return 1;
1583
1584  return 0;
1585}
1586
1587/* 1 if we need a static constructor to register EH table info.  */
1588
1589int
1590register_exception_table_p ()
1591{
1592#if defined (DWARF2_UNWIND_INFO)
1593  return 0;
1594#endif
1595
1596  return exception_table_p ();
1597}
1598
1599/* Output the entry of the exception table corresponding to the
1600   exception region numbered N to file FILE.
1601
1602   N is the code label number corresponding to the handler of the
1603   region.  */
1604
1605static void
1606output_exception_table_entry (file, n)
1607     FILE *file;
1608     int n;
1609{
1610  char buf[256];
1611  rtx sym;
1612
1613  ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
1614  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1615  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1616
1617  ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
1618  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1619  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1620
1621  ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
1622  sym = gen_rtx (SYMBOL_REF, Pmode, buf);
1623  assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
1624
1625  putc ('\n', file);            /* blank line */
1626}
1627
1628/* Output the exception table if we have and need one.  */
1629
1630void
1631output_exception_table ()
1632{
1633  int i;
1634  extern FILE *asm_out_file;
1635
1636  if (! doing_eh (0) || ! eh_table)
1637    return;
1638
1639  exception_section ();
1640
1641  /* Beginning marker for table.  */
1642  assemble_align (GET_MODE_ALIGNMENT (ptr_mode));
1643  assemble_label ("__EXCEPTION_TABLE__");
1644
1645  for (i = 0; i < eh_table_size; ++i)
1646    output_exception_table_entry (asm_out_file, eh_table[i]);
1647
1648  free (eh_table);
1649
1650  /* Ending marker for table.  */
1651  assemble_label ("__EXCEPTION_END__");
1652  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1653  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1654  assemble_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
1655  putc ('\n', asm_out_file);            /* blank line */
1656}
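
/* At run time the table emitted above is just an array of address triples
   (region start LEHB_n, region end LEHE_n, handler L_n) bracketed by
   __EXCEPTION_TABLE__ and __EXCEPTION_END__ and terminated by a -1/-1/-1
   entry.  The standalone sketch below models that layout and the lookup
   the runtime conceptually performs; eh_entry, find_handler and the fake
   regions are invented names, and the real search (in libgcc) is more
   involved than this linear scan.  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>

/* One entry per EH region: the LEHB_n/LEHE_n/L_n triple emitted by
   output_exception_table_entry.  */
struct eh_entry
{
  void *start;     /* LEHB_n: first address of the region */
  void *end;       /* LEHE_n: first address past the region */
  void *handler;   /* L_n:    the handler's code label */
};

/* Hand-written stand-in for the assembled table; the (void *) -1 entries
   play the role of the three constm1_rtx words at the end.  */
static char region_a[16], region_b[16], handler_a, handler_b;

static struct eh_entry model_table[] = {
  { region_a, region_a + sizeof region_a, &handler_a },
  { region_b, region_b + sizeof region_b, &handler_b },
  { (void *) -1, (void *) -1, (void *) -1 },
};

/* Conceptual lookup: find the handler for the region containing PC.  */
static void *
find_handler (void *pc)
{
  struct eh_entry *p;

  for (p = model_table; p->start != (void *) -1; p++)
    if ((char *) pc >= (char *) p->start && (char *) pc < (char *) p->end)
      return p->handler;

  return 0;   /* no handler in this table */
}

int
main (void)
{
  printf ("handler for region_a+3: %p (expect %p)\n",
          find_handler (region_a + 3), (void *) &handler_a);
  return 0;
}
#endif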
1657
1658/* Generate code to initialize the exception table at program startup
1659   time.  */
1660
1661void
1662register_exception_table ()
1663{
1664  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__register_exceptions"), 0,
1665                     VOIDmode, 1,
1666                     gen_rtx (SYMBOL_REF, Pmode, "__EXCEPTION_TABLE__"),
1667                     Pmode);
1668}
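
/* Illustration: the call emitted above ends up in a static constructor, so
   at the C level the effect is roughly a per-object-file constructor that
   hands its table to the runtime.  In the standalone sketch below,
   my_register_exceptions and my_table are invented stand-ins for
   __register_exceptions and the __EXCEPTION_TABLE__ label, and the toy
   list replaces whatever bookkeeping the real runtime does.  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>

static void *registered_tables[32];
static int n_registered;

static void
my_register_exceptions (void *table)
{
  if (n_registered < 32)
    registered_tables[n_registered++] = table;
}

static char my_table[1];   /* stands in for the __EXCEPTION_TABLE__ label */

static void register_this_unit (void) __attribute__ ((constructor));
static void
register_this_unit (void)
{
  my_register_exceptions (my_table);
}

int
main (void)
{
  printf ("%d table(s) registered\n", n_registered);
  return 0;
}
#endif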
1669
1670/* Emit the RTL for the start of the per-function unwinder for the
1671   current function. See emit_unwinder for further information.
1672
1673   DOESNT_NEED_UNWINDER is a target-specific macro that determines if
1674   the current function actually needs a per-function unwinder or not.
1675   By default, all functions need one.  */
1676
1677void
1678start_eh_unwinder ()
1679{
1680#ifdef DOESNT_NEED_UNWINDER
1681  if (DOESNT_NEED_UNWINDER)
1682    return;
1683#endif
1684
1685  /* If we are using the setjmp/longjmp implementation, we don't need a
1686     per function unwinder.  */
1687
1688  if (exceptions_via_longjmp)
1689    return;
1690
1691#ifdef DWARF2_UNWIND_INFO
1692  return;
1693#endif
1694
1695  expand_eh_region_start ();
1696}
1697
1698/* Emit insns for the end of the per-function unwinder for the
1699   current function.  */
1700
1701void
1702end_eh_unwinder ()
1703{
1704  tree expr;
1705  rtx return_val_rtx, ret_val, label, end, insns;
1706
1707  if (! doing_eh (0))
1708    return;
1709
1710#ifdef DOESNT_NEED_UNWINDER
1711  if (DOESNT_NEED_UNWINDER)
1712    return;
1713#endif
1714
1715  /* If we are using the setjmp/longjmp implementation, we don't need a
1716     per function unwinder.  */
1717
1718  if (exceptions_via_longjmp)
1719    return;
1720
1721#ifdef DWARF2_UNWIND_INFO
1722  return;
1723#else /* DWARF2_UNWIND_INFO */
1724
1725  assemble_external (eh_saved_pc);
1726
1727  expr = make_node (RTL_EXPR);
1728  TREE_TYPE (expr) = void_type_node;
1729  RTL_EXPR_RTL (expr) = const0_rtx;
1730  TREE_SIDE_EFFECTS (expr) = 1;
1731  start_sequence_for_rtl_expr (expr);
1732
1733  /* ret_val will contain the address of the code where the call
1734     to the current function occurred.  */
1735  ret_val = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
1736                                        0, hard_frame_pointer_rtx);
1737  return_val_rtx = copy_to_reg (ret_val);
1738
1739  /* Get the address we need to use to determine what exception
1740     handler should be invoked, and store it in __eh_pc.  */
1741  return_val_rtx = eh_outer_context (return_val_rtx);
1742  return_val_rtx = expand_binop (Pmode, sub_optab, return_val_rtx, GEN_INT (1),
1743                                 NULL_RTX, 0, OPTAB_LIB_WIDEN);
1744  emit_move_insn (eh_saved_pc_rtx, return_val_rtx);
1745 
1746  /* Either set things up so we do a return directly to __throw, or
1747     we return here instead.  */
1748#ifdef JUMP_TO_THROW
1749  emit_move_insn (ret_val, throw_libfunc);
1750#else
1751  label = gen_label_rtx ();
1752  emit_move_insn (ret_val, gen_rtx (LABEL_REF, Pmode, label));
1753#endif
1754
1755#ifdef RETURN_ADDR_OFFSET
1756  return_val_rtx = plus_constant (ret_val, -RETURN_ADDR_OFFSET);
1757  if (return_val_rtx != ret_val)
1758    emit_move_insn (ret_val, return_val_rtx);
1759#endif
1760 
1761  end = gen_label_rtx ();
1762  emit_jump (end); 
1763
1764  RTL_EXPR_SEQUENCE (expr) = get_insns ();
1765  end_sequence ();
1766
1767  expand_eh_region_end (expr);
1768
1769  emit_jump (end);
1770
1771#ifndef JUMP_TO_THROW
1772  emit_label (label);
1773  emit_throw ();
1774#endif
1775 
1776  expand_leftover_cleanups ();
1777
1778  emit_label (end);
1779
1780#ifdef HAVE_return
1781  if (HAVE_return)
1782    {
1783      emit_jump_insn (gen_return ());
1784      emit_barrier ();
1785    }
1786#endif
1787#endif /* DWARF2_UNWIND_INFO */
1788}
1789
1790/* If necessary, emit insns for the per function unwinder for the
1791   current function.  Called after all the code that needs unwind
1792   protection is output. 
1793
1794   The unwinder takes care of catching any exceptions that have not
1795   been previously caught within the function, unwinding the stack to
1796   the next frame, and rethrowing using the address of the current
1797   function's caller as the context of the throw.
1798
1799   On some platforms __throw can do this by itself (or with the help
1800   of __unwind_function) so the per-function unwinder is
1801   unnecessary.
1802 
1803   We cannot place the unwinder into the function until after we know
1804   we are done inlining, as we don't want to have more than one
1805   unwinder per non-inlined function.  */
1806
1807void
1808emit_unwinder ()
1809{
1810  rtx insns, insn;
1811
1812  start_sequence ();
1813  start_eh_unwinder ();
1814  insns = get_insns ();
1815  end_sequence ();
1816
1817  /* We place the start of the exception region associated with the
1818     per function unwinder at the top of the function.  */
1819  if (insns)
1820    emit_insns_after (insns, get_insns ());
1821
1822  start_sequence ();
1823  end_eh_unwinder ();
1824  insns = get_insns ();
1825  end_sequence ();
1826
1827  /* And we place the end of the exception region before the USE and
1828     CLOBBER insns that may come at the end of the function.  */
1829  if (insns == 0)
1830    return;
1831
1832  insn = get_last_insn ();
1833  while (GET_CODE (insn) == NOTE
1834         || (GET_CODE (insn) == INSN
1835             && (GET_CODE (PATTERN (insn)) == USE
1836                 || GET_CODE (PATTERN (insn)) == CLOBBER)))
1837    insn = PREV_INSN (insn);
1838
1839  if (GET_CODE (insn) == CODE_LABEL
1840      && GET_CODE (PREV_INSN (insn)) == BARRIER)
1841    {
1842      insn = PREV_INSN (insn);
1843    }
1844  else
1845    {
1846      rtx label = gen_label_rtx ();
1847      emit_label_after (label, insn);
1848      insn = emit_jump_insn_after (gen_jump (label), insn);
1849      insn = emit_barrier_after (insn);
1850    }
1851   
1852  emit_insns_after (insns, insn);
1853}
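
/* The heart of the unwinder emitted above is the address computation in
   end_eh_unwinder: take the caller's return address, back up by one so
   the value lands inside the call site's LEHB/LEHE range (the return
   address itself points just past the call), and store it in __eh_pc
   before transferring to __throw.  The standalone sketch below shows just
   that computation; my_eh_pc and record_throw_context are invented names,
   and nothing here performs the actual transfer to __throw.  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>

/* my_eh_pc stands in for the real __eh_pc (eh_saved_pc above).  */
static void *my_eh_pc;

/* Must not be inlined, or __builtin_return_address (0) would name the
   wrong frame.  The subtraction of 1 mirrors the sub_optab adjustment in
   end_eh_unwinder.  */
static void __attribute__ ((noinline))
record_throw_context (void)
{
  my_eh_pc = (char *) __builtin_return_address (0) - 1;
}

int
main (void)
{
  record_throw_context ();
  printf ("throw context pc: %p\n", my_eh_pc);
  return 0;
}
#endif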
1854
1855/* Scan the current insns and build a list of handler labels. The
1856   resulting list is placed in the global variable exception_handler_labels.
1857
1858   It is called after the last exception handling region is added to
1859   the current function (when the rtl is almost all built for the
1860   current function) and before the jump optimization pass.  */
1861
1862void
1863find_exception_handler_labels ()
1864{
1865  rtx insn;
1866  int max_labelno = max_label_num ();
1867  int min_labelno = get_first_label_num ();
1868  rtx *labels;
1869
1870  exception_handler_labels = NULL_RTX;
1871
1872  /* If we aren't doing exception handling, there isn't much to check.  */
1873  if (! doing_eh (0))
1874    return;
1875
1876  /* Generate a handy reference to each label.  */
1877
1878  /* We call xmalloc here instead of alloca; we did the latter in the past,
1879     but found that it can sometimes end up being asked to allocate space
1880     for more than 1 million labels.  */
1881  labels = (rtx *) xmalloc ((max_labelno - min_labelno) * sizeof (rtx));
1882  bzero ((char *) labels, (max_labelno - min_labelno) * sizeof (rtx));
1883
1884  /* Arrange for labels to be indexed directly by CODE_LABEL_NUMBER.  */
1885  labels -= min_labelno;
1886
1887  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1888    {
1889      if (GET_CODE (insn) == CODE_LABEL)
1890        if (CODE_LABEL_NUMBER (insn) >= min_labelno
1891            && CODE_LABEL_NUMBER (insn) < max_labelno)
1892          labels[CODE_LABEL_NUMBER (insn)] = insn;
1893    }
1894
1895  /* For each start of a region, add its label to the list.  */
1896
1897  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1898    {
1899      if (GET_CODE (insn) == NOTE
1900          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
1901        {
1902          rtx label = NULL_RTX;
1903
1904          if (NOTE_BLOCK_NUMBER (insn) >= min_labelno
1905              && NOTE_BLOCK_NUMBER (insn) < max_labelno)
1906            {
1907              label = labels[NOTE_BLOCK_NUMBER (insn)];
1908
1909              if (label)
1910                exception_handler_labels
1911                  = gen_rtx (EXPR_LIST, VOIDmode,
1912                             label, exception_handler_labels);
1913              else
1914                warning ("didn't find handler for EH region %d",
1915                         NOTE_BLOCK_NUMBER (insn));
1916            }
1917          else
1918            warning ("mismatched EH region %d", NOTE_BLOCK_NUMBER (insn));
1919        }
1920    }
1921
1922  free (labels + min_labelno);
1923}
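
/* The "labels -= min_labelno" line above is an offset-indexing idiom: only
   max - min slots are allocated, but biasing the pointer lets the code
   index the array directly by CODE_LABEL_NUMBER.  The standalone sketch
   below shows the same idiom with made-up numbers; like the original, it
   relies on the biased pointer only ever being used with indices in
   [min_labelno, max_labelno).  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  int min_labelno = 100, max_labelno = 110;   /* made-up label range */
  void **labels;

  labels = (void **) calloc (max_labelno - min_labelno, sizeof (void *));

  /* Bias the pointer so labels[min_labelno] is the first allocated slot.  */
  labels -= min_labelno;

  labels[105] = &min_labelno;   /* index directly by "label number" */
  printf ("slot 105 filled: %s\n", labels[105] ? "yes" : "no");

  /* Undo the bias before freeing, exactly as the original does.  */
  free (labels + min_labelno);
  return 0;
}
#endif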
1924
1925/* Perform sanity checking on the exception_handler_labels list.
1926
1927   Can be called after find_exception_handler_labels is called to
1928   build the list of exception handlers for the current function and
1929   before we finish processing the current function.  */
1930
1931void
1932check_exception_handler_labels ()
1933{
1934  rtx insn, handler;
1935
1936  /* If we aren't doing exception handling, there isn't much to check.  */
1937  if (! doing_eh (0))
1938    return;
1939
1940  /* Ensure that the CODE_LABEL_NUMBER for the CODE_LABEL entry point
1941     in each handler corresponds to the CODE_LABEL_NUMBER of the
1942     handler.  */
1943
1944  for (handler = exception_handler_labels;
1945       handler;
1946       handler = XEXP (handler, 1))
1947    {
1948      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1949        {
1950          if (GET_CODE (insn) == CODE_LABEL)
1951            {
1952              if (CODE_LABEL_NUMBER (insn)
1953                  == CODE_LABEL_NUMBER (XEXP (handler, 0)))
1954                {
1955                  if (insn != XEXP (handler, 0))
1956                    warning ("mismatched handler %d",
1957                             CODE_LABEL_NUMBER (insn));
1958                  break;
1959                }
1960            }
1961        }
1962      if (insn == NULL_RTX)
1963        warning ("handler not found %d",
1964                 CODE_LABEL_NUMBER (XEXP (handler, 0)));
1965    }
1966
1967  /* Now go through and make sure that for each region there is a
1968     corresponding label.  */
1969  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1970    {
1971      if (GET_CODE (insn) == NOTE
1972          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1973              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1974        {
1975          for (handler = exception_handler_labels;
1976               handler;
1977               handler = XEXP (handler, 1))
1978            {
1979              if (CODE_LABEL_NUMBER (XEXP (handler, 0))
1980                  == NOTE_BLOCK_NUMBER (insn))
1981                break;
1982            }
1983          if (handler == NULL_RTX)
1984            warning ("region exists, no handler %d",
1985                     NOTE_BLOCK_NUMBER (insn));
1986        }
1987    }
1988}
1989
1990/* This group of functions initializes the exception handling data
1991   structures at the start of the compilation, initializes the data
1992   structures at the start of a function, and saves and restores the
1993   exception handling data structures for the start/end of a nested
1994   function.  */
1995
1996/* Toplevel initialization for EH things.  */
1997
1998void
1999init_eh ()
2000{
2001  /* Generate rtl to reference the variable in which the PC of the
2002     current context is saved.  */
2003  tree type = build_pointer_type (make_node (VOID_TYPE));
2004
2005  eh_saved_pc = build_decl (VAR_DECL, get_identifier ("__eh_pc"), type);
2006  DECL_EXTERNAL (eh_saved_pc) = 1;
2007  TREE_PUBLIC (eh_saved_pc) = 1;
2008  make_decl_rtl (eh_saved_pc, NULL_PTR, 1);
2009  eh_saved_pc_rtx = DECL_RTL (eh_saved_pc);
2010}
2011
2012/* Initialize the per-function EH information.  */
2013
2014void
2015init_eh_for_function ()
2016{
2017  ehstack.top = 0;
2018  ehqueue.head = ehqueue.tail = 0;
2019  catch_clauses = NULL_RTX;
2020  false_label_stack = 0;
2021  caught_return_label_stack = 0;
2022  protect_list = NULL_TREE;
2023  current_function_dhc = NULL_RTX;
2024  current_function_dcc = NULL_RTX;
2025}
2026
2027/* Save some of the per-function EH info into the save area denoted by
2028   P.
2029
2030   This is currently called from save_stmt_status.  */
2031
2032void
2033save_eh_status (p)
2034     struct function *p;
2035{
2036  p->ehstack = ehstack;
2037  p->ehqueue = ehqueue;
2038  p->catch_clauses = catch_clauses;
2039  p->false_label_stack = false_label_stack;
2040  p->caught_return_label_stack = caught_return_label_stack;
2041  p->protect_list = protect_list;
2042  p->dhc = current_function_dhc;
2043  p->dcc = current_function_dcc;
2044
2045  init_eh ();
2046}
2047
2048/* Restore the per-function EH info saved into the area denoted by P. 
2049
2050   This is currently called from restore_stmt_status.  */
2051
2052void
2053restore_eh_status (p)
2054     struct function *p;
2055{
2056  protect_list = p->protect_list;
2057  caught_return_label_stack = p->caught_return_label_stack;
2058  false_label_stack = p->false_label_stack;
2059  catch_clauses = p->catch_clauses;
2060  ehqueue = p->ehqueue;
2061  ehstack = p->ehstack;
2062  current_function_dhc = p->dhc;
2063  current_function_dcc = p->dcc;
2064}
2065
2066/* This section is for the exception handling specific optimization
2067   pass.  First are the internal routines, and then the main
2068   optimization pass.  */
2069
2070/* Determine if the given INSN can throw an exception.  */
2071
2072static int
2073can_throw (insn)
2074     rtx insn;
2075{
2076  /* Calls can always potentially throw exceptions.  */
2077  if (GET_CODE (insn) == CALL_INSN)
2078    return 1;
2079
2080  if (asynchronous_exceptions)
2081    {
2082      /* Since we want asynchronous exceptions, everything but NOTEs
2083         and CODE_LABELs could throw.  */
2084      if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
2085        return 1;
2086    }
2087
2088  return 0;
2089}
2090
2091/* Scan an exception region looking for the matching end and then
2092   remove it if possible.  INSN is the start of the region, N is the
2093   region number, and DELETE_OUTER is used to note whether anything in
2094   this region can throw.
2095
2096   Regions are removed if they cannot possibly catch an exception.
2097   This is determined by invoking can_throw on each insn within the
2098   region; if can_throw returns true for any of the instructions, the
2099   region can catch an exception, since there is an insn within the
2100   region that is capable of throwing an exception.
2101
2102   Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
2103   calls abort if it can't find one.
2104
2105   Can abort if INSN is not a NOTE_INSN_EH_REGION_BEG, or if N doesn't
2106   correspond to the region number, or if DELETE_OUTER is NULL.  */
2107
2108static rtx
2109scan_region (insn, n, delete_outer)
2110     rtx insn;
2111     int n;
2112     int *delete_outer;
2113{
2114  rtx start = insn;
2115
2116  /* Assume we can delete the region.  */
2117  int delete = 1;
2118
2119  if (! (GET_CODE (insn) == NOTE
2120         && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
2121         && NOTE_BLOCK_NUMBER (insn) == n
2122         && delete_outer != NULL))
2123    abort ();
2124
2125  insn = NEXT_INSN (insn);
2126
2127  /* Look for the matching end.  */
2128  while (! (GET_CODE (insn) == NOTE
2129            && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2130    {
2131      /* If anything can throw, we can't remove the region.  */
2132      if (delete && can_throw (insn))
2133        {
2134          delete = 0;
2135        }
2136
2137      /* Watch out for and handle nested regions.  */
2138      if (GET_CODE (insn) == NOTE
2139          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2140        {
2141          insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &delete);
2142        }
2143
2144      insn = NEXT_INSN (insn);
2145    }
2146
2147  /* The _BEG/_END NOTEs must match and nest.  */
2148  if (NOTE_BLOCK_NUMBER (insn) != n)
2149    abort ();
2150
2151  /* If anything in this exception region can throw, we can throw.  */
2152  if (! delete)
2153    *delete_outer = 0;
2154  else
2155    {
2156      /* Delete the start and end of the region.  */
2157      delete_insn (start);
2158      delete_insn (insn);
2159
2160      /* Only do this part if we have built the exception handler
2161         labels.  */
2162      if (exception_handler_labels)
2163        {
2164          rtx x, *prev = &exception_handler_labels;
2165
2166          /* Find it in the list of handlers.  */
2167          for (x = exception_handler_labels; x; x = XEXP (x, 1))
2168            {
2169              rtx label = XEXP (x, 0);
2170              if (CODE_LABEL_NUMBER (label) == n)
2171                {
2172                  /* If we are the last reference to the handler,
2173                     delete it.  */
2174                  if (--LABEL_NUSES (label) == 0)
2175                    delete_insn (label);
2176
2177                  if (optimize)
2178                    {
2179                      /* Remove it from the list of exception handler
2180                         labels, if we are optimizing.  If we are not, then
2181                         leave it in the list, as we are not really going to
2182                         remove the region.  */
2183                      *prev = XEXP (x, 1);
2184                      XEXP (x, 1) = 0;
2185                      XEXP (x, 0) = 0;
2186                    }
2187
2188                  break;
2189                }
2190              prev = &XEXP (x, 1);
2191            }
2192        }
2193    }
2194  return insn;
2195}
2196
2197/* Perform various interesting optimizations for exception handling
2198   code.
2199
2200   We look for empty exception regions and make them go away.  The
2201   jump optimization code will remove the handler if nothing else uses
2202   it.  */
2203
2204void
2205exception_optimize ()
2206{
2207  rtx insn, regions = NULL_RTX;
2208  int n;
2209
2210  /* The below doesn't apply to setjmp/longjmp EH.  */
2211  if (exceptions_via_longjmp)
2212    return;
2213
2214  /* Remove empty regions.  */
2215  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2216    {
2217      if (GET_CODE (insn) == NOTE
2218          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2219        {
2220          /* Since scan_region will return the NOTE_INSN_EH_REGION_END
2221             insn, we will indirectly skip through all the insns
2222             in between.  We are also guaranteed that the value of insn
2223             returned will be valid, as otherwise scan_region won't
2224             return.  */
2225          insn = scan_region (insn, NOTE_BLOCK_NUMBER (insn), &n);
2226        }
2227    }
2228}
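
/* A self-contained model of the empty-region removal above: an array of
   pseudo-insns replaces the insn chain, CALL stands in for any insn that
   can_throw would flag, and a region whose body cannot throw has its
   begin/end markers deleted while "can throw" propagates outward exactly
   as scan_region clears *delete_outer.  Every name below is invented for
   the illustration.  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>

enum kind { PLAIN, CALL, REGION_BEG, REGION_END, DELETED };

struct pseudo_insn { enum kind k; int region; };

/* Scan one region starting at index I (a REGION_BEG).  Returns the index
   of the matching REGION_END.  Sets *OUTER_CAN_THROW if anything inside
   can throw; otherwise the BEG/END markers are marked DELETED, just as
   scan_region deletes the NOTEs of an empty region.  */
static int
scan_region_model (struct pseudo_insn *insns, int i, int *outer_can_throw)
{
  int start = i, can_throw_here = 0;

  for (i++;
       ! (insns[i].k == REGION_END && insns[i].region == insns[start].region);
       i++)
    {
      if (insns[i].k == CALL)
        can_throw_here = 1;
      else if (insns[i].k == REGION_BEG)
        i = scan_region_model (insns, i, &can_throw_here);
    }

  if (can_throw_here)
    *outer_can_throw = 1;
  else
    insns[start].k = insns[i].k = DELETED;

  return i;
}

int
main (void)
{
  /* Region 1 contains a call, so it stays; nested region 2 is empty
     and its markers are removed.  */
  struct pseudo_insn insns[] = {
    { REGION_BEG, 1 }, { CALL, 0 },
    { REGION_BEG, 2 }, { PLAIN, 0 }, { REGION_END, 2 },
    { REGION_END, 1 },
  };
  int i, dummy = 0;

  for (i = 0; i < 6; i++)
    if (insns[i].k == REGION_BEG)
      i = scan_region_model (insns, i, &dummy);

  for (i = 0; i < 6; i++)
    printf ("insn %d: %s\n", i, insns[i].k == DELETED ? "deleted" : "kept");
  return 0;
}
#endif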
2229
2230/* Various hooks for the DWARF 2 __throw routine.  */
2231
2232/* Do any necessary initialization to access arbitrary stack frames.
2233   On the SPARC, this means flushing the register windows.  */
2234
2235void
2236expand_builtin_unwind_init ()
2237{
2238  /* Set this so all the registers get saved in our frame; we need to be
2239     able to copy the saved values for any registers from frames we unwind. */
2240  current_function_has_nonlocal_label = 1;
2241
2242#ifdef SETUP_FRAME_ADDRESSES
2243  SETUP_FRAME_ADDRESSES ();
2244#endif
2245}
2246
2247/* Given a value extracted from the return address register or stack slot,
2248   return the actual address encoded in that value.  */
2249
2250rtx
2251expand_builtin_extract_return_addr (addr_tree)
2252     tree addr_tree;
2253{
2254  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2255  return eh_outer_context (addr);
2256}
2257
2258/* Given an actual address in addr_tree, do any necessary encoding
2259   and return the value to be stored in the return address register or
2260   stack slot so the epilogue will return to that address.  */
2261
2262rtx
2263expand_builtin_frob_return_addr (addr_tree)
2264     tree addr_tree;
2265{
2266  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2267#ifdef RETURN_ADDR_OFFSET
2268  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2269#endif
2270  return addr;
2271}
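
/* Round-trip sketch of the extract/frob pair, under the simplifying
   assumption that the encoding is nothing more than a constant
   RETURN_ADDR_OFFSET bias (any MASK_RETURN_ADDR-style masking done by
   eh_outer_context is ignored, and the offset value 8 is made up).  */
#if 0  /* standalone illustration -- compile separately */
#include <assert.h>
#include <stdio.h>

#define MODEL_RETURN_ADDR_OFFSET 8   /* made-up value for the sketch */

/* Decode a stored return-address value into the actual address (the role
   of expand_builtin_extract_return_addr, minus any masking).  */
static unsigned long
model_extract (unsigned long stored)
{
  return stored + MODEL_RETURN_ADDR_OFFSET;
}

/* Re-encode an actual address into the form the epilogue expects (the
   role of expand_builtin_frob_return_addr).  */
static unsigned long
model_frob (unsigned long actual)
{
  return actual - MODEL_RETURN_ADDR_OFFSET;
}

int
main (void)
{
  unsigned long stored = 0x1000;

  assert (model_frob (model_extract (stored)) == stored);
  printf ("stored %#lx <-> actual %#lx\n", stored, model_extract (stored));
  return 0;
}
#endif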
2272
2273/* Given an actual address in addr_tree, set the return address register up
2274   so the epilogue will return to that address.  If the return address is
2275   not in a register, do nothing.  */
2276
2277void
2278expand_builtin_set_return_addr_reg (addr_tree)
2279     tree addr_tree;
2280{
2281  rtx tmp;
2282  rtx ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
2283                                       0, hard_frame_pointer_rtx);
2284
2285  if (GET_CODE (ra) != REG || REGNO (ra) >= FIRST_PSEUDO_REGISTER)
2286    return;
2287
2288  tmp = force_operand (expand_builtin_frob_return_addr (addr_tree), ra);
2289  if (tmp != ra)
2290    emit_move_insn (ra, tmp);
2291}
2292
2293/* Choose two registers for communication between the main body of
2294   __throw and the stub for adjusting the stack pointer.  The first register
2295   is used to pass the address of the exception handler; the second register
2296   is used to pass the stack pointer offset.
2297
2298   For register 1 we use the return value register for a void *.
2299   For register 2 we use the static chain register if it exists and is
2300     different from register 1, otherwise some arbitrary call-clobbered
2301     register.  */
2302
2303static void
2304eh_regs (r1, r2, outgoing)
2305     rtx *r1, *r2;
2306     int outgoing;
2307{
2308  rtx reg1, reg2;
2309
2310#ifdef FUNCTION_OUTGOING_VALUE
2311  if (outgoing)
2312    reg1 = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
2313                                    current_function_decl);
2314  else
2315#endif
2316    reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
2317                           current_function_decl);
2318
2319#ifdef STATIC_CHAIN_REGNUM
2320  if (outgoing)
2321    reg2 = static_chain_incoming_rtx;
2322  else
2323    reg2 = static_chain_rtx;
2324  if (REGNO (reg2) == REGNO (reg1))
2325#endif /* STATIC_CHAIN_REGNUM */
2326    reg2 = NULL_RTX;
2327
2328  if (reg2 == NULL_RTX)
2329    {
2330      int i;
2331      for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2332        if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
2333          {
2334            reg2 = gen_rtx (REG, Pmode, i);
2335            break;
2336          }
2337
2338      if (reg2 == NULL_RTX)
2339        abort ();
2340    }
2341
2342  *r1 = reg1;
2343  *r2 = reg2;
2344}
2345
2346/* Emit inside of __throw a stub which adjusts the stack pointer and jumps
2347   to the exception handler.  __throw will set up the necessary values
2348   and then return to the stub.  */
2349
2350rtx
2351expand_builtin_eh_stub ()
2352{
2353  rtx stub_start = gen_label_rtx ();
2354  rtx after_stub = gen_label_rtx ();
2355  rtx handler, offset, temp;
2356
2357  emit_jump (after_stub);
2358  emit_label (stub_start);
2359
2360  eh_regs (&handler, &offset, 0);
2361
2362  adjust_stack (offset);
2363  emit_indirect_jump (handler);
2364
2365  emit_label (after_stub);
2366  return gen_rtx (LABEL_REF, Pmode, stub_start);
2367}
2368
2369/* Set up the registers for passing the handler address and stack offset
2370   to the stub above.  */
2371
2372void
2373expand_builtin_set_eh_regs (handler, offset)
2374     tree handler, offset;
2375{
2376  rtx reg1, reg2;
2377
2378  eh_regs (&reg1, &reg2, 1);
2379
2380  store_expr (offset,  reg2, 0);
2381  store_expr (handler, reg1, 0);
2382
2383  /* These will be used by the stub.  */
2384  emit_insn (gen_rtx (USE, VOIDmode, reg1));
2385  emit_insn (gen_rtx (USE, VOIDmode, reg2));
2386}
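
/* Data-level model of the hand-off described above: eh_regs picks two hard
   registers, expand_builtin_set_eh_regs stores the handler address and the
   stack adjustment into them, and the stub emitted by
   expand_builtin_eh_stub consumes them.  In the standalone sketch below,
   globals stand in for the registers, an ordinary call stands in for
   "returning" into the stub, and the stack adjustment is only printed,
   since it has no C-level equivalent; every name is invented.  */
#if 0  /* standalone illustration -- compile separately */
#include <stdio.h>

static void *model_reg1;   /* handler address (return-value register) */
static long model_reg2;    /* stack adjustment (static chain or other
                              call-clobbered register) */

/* What the stub does once control reaches it: adjust the stack by the
   value in reg2, then jump indirectly through reg1.  */
static void
model_stub (void)
{
  printf ("adjust sp by %ld, then jump to handler %p\n",
          model_reg2, model_reg1);
}

/* What __throw does just before returning to the stub.  */
static void
model_set_eh_regs (void *handler, long offset)
{
  model_reg2 = offset;
  model_reg1 = handler;
}

int
main (void)
{
  static char fake_handler;           /* stands in for a handler label */

  model_set_eh_regs (&fake_handler, 16);
  model_stub ();                      /* "return" into the stub */
  return 0;
}
#endif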