Diffstat (limited to 'gcc/except.c')
-rw-r--r--  gcc/except.c  5685
1 files changed, 2935 insertions, 2750 deletions
diff --git a/gcc/except.c b/gcc/except.c
index 4770031220a..d84a03b37ee 100644
--- a/gcc/except.c
+++ b/gcc/except.c
@@ -44,1070 +44,954 @@ Boston, MA 02111-1307, USA. */
exception, and thus there is the concept of "throwing" the
exception up the call stack.
- There are two major codegen options for exception handling: a PC
- range table approach and a setjmp/longjmp based scheme. The flag
- -fsjlj-exceptions selects the setjmp/longjmp approach, which is the
- default; -fno-sjlj-exceptions selects the PC range table approach.
- While this is a compile time flag, an entire application must be
- compiled with the same codegen option. We will first discuss the PC
- range table approach, and after that the setjmp/longjmp based
- approach.
-
- It is appropriate to speak of the "context of a throw". This
- context refers to the address where the exception is thrown from,
- and is used to determine which exception region will handle the
- exception.
-
- Regions of code within a function can be marked such that if it
- contains the context of a throw, control will be passed to a
- designated "exception handler". These areas are known as "exception
- regions". Exception regions cannot overlap, but they can be nested
- to any arbitrary depth. Also, exception regions cannot cross
- function boundaries.
-
- Exception handlers can either be specified by the user (which we
- will call a "user-defined handler") or generated by the compiler
- (which we will designate as a "cleanup"). Cleanups are used to
- perform tasks such as destruction of objects allocated on the
- stack.
-
- In the current implementation, cleanups are handled by allocating an
- exception region for the area that the cleanup is designated for,
- and the handler for the region performs the cleanup and then
- rethrows the exception to the outer exception region. From the
- standpoint of the current implementation, there is little
- distinction made between a cleanup and a user-defined handler, and
- the phrase "exception handler" can be used to refer to either one
- equally well. (The section "Future Directions" below discusses how
- this will change).
-
- Each object file that is compiled with exception handling contains
- a static array of exception handlers named __EXCEPTION_TABLE__.
- Each entry contains the starting and ending addresses of the
- exception region, and the address of the handler designated for
- that region.
-
- If the target does not use the DWARF 2 frame unwind information, at
- program startup each object file invokes a function named
- __register_exceptions with the address of its local
- __EXCEPTION_TABLE__. __register_exceptions is defined in libgcc2.c, and
- is responsible for recording all of the exception regions into one list
- (which is kept in a static variable named exception_table_list).
-
- On targets that support crtstuff.c, the unwind information
- is stored in a section named .eh_frame and the information for the
- entire shared object or program is registered with a call to
- __register_frame_info. On other targets, the information for each
- translation unit is registered from the file generated by collect2.
- __register_frame_info is defined in frame.c, and is responsible for
- recording all of the unwind regions into one list (which is kept in a
- static variable named unwind_table_list).
-
- The function __throw is actually responsible for doing the
- throw. On machines that have unwind info support, __throw is generated
- by code in libgcc2.c, otherwise __throw is generated on a
- per-object-file basis for each source file compiled with
- -fexceptions by the C++ frontend. Before __throw is invoked,
- the current context of the throw needs to be placed in the global
- variable __eh_pc.
-
- __throw attempts to find the appropriate exception handler for the
- PC value stored in __eh_pc by calling __find_first_exception_table_match
- (which is defined in libgcc2.c). If __find_first_exception_table_match
- finds a relevant handler, __throw transfers control directly to it.
-
- If a handler for the context being thrown from can't be found, __throw
- walks (see Walking the stack below) the stack up the dynamic call chain to
- continue searching for an appropriate exception handler based upon the
- caller of the function it last sought an exception handler for. It stops
- when either an exception handler is found or the top of the
- call chain is reached.
-
- If no handler is found, an external library function named
- __terminate is called. If a handler is found, then we restart
- our search for a handler at the end of the call chain, and repeat
- the search process, but instead of just walking up the call chain,
- we unwind the call chain as we walk up it.
-
- Internal implementation details:
-
- To associate a user-defined handler with a block of statements, the
- function expand_start_try_stmts is used to mark the start of the
- block of statements with which the handler is to be associated
- (which is known as a "try block"). All statements that appear
- afterwards will be associated with the try block.
-
- A call to expand_start_all_catch marks the end of the try block,
- and also marks the start of the "catch block" (the user-defined
- handler) associated with the try block.
-
- This user-defined handler will be invoked for *every* exception
- thrown with the context of the try block. It is up to the handler
- to decide whether or not it wishes to handle any given exception,
- as there is currently no mechanism in this implementation for doing
- this. (There are plans for conditionally processing an exception
- based on its "type", which will provide a language-independent
- mechanism).
-
- If the handler chooses not to process the exception (perhaps by
- looking at an "exception type" or some other additional data
- supplied with the exception), it can fall through to the end of the
- handler. expand_end_all_catch and expand_leftover_cleanups
- add additional code to the end of each handler to take care of
- rethrowing to the outer exception handler.
-
- The handler also has the option to continue with "normal flow of
- code", or in other words to resume executing at the statement
- immediately after the end of the exception region. The variable
- caught_return_label_stack contains a stack of labels, and jumping
- to the topmost entry's label via expand_goto will resume normal
- flow to the statement immediately after the end of the exception
- region. If the handler falls through to the end, the exception will
- be rethrown to the outer exception region.
-
- The instructions for the catch block are kept as a separate
- sequence, and will be emitted at the end of the function along with
- the handlers specified via expand_eh_region_end. The end of the
- catch block is marked with expand_end_all_catch.
-
- Any data associated with the exception must currently be handled by
- some external mechanism maintained in the frontend. For example,
- the C++ exception mechanism passes an arbitrary value along with
- the exception, and this is handled in the C++ frontend by using a
- global variable to hold the value. (This will be changing in the
- future.)
-
- The mechanism in C++ for handling data associated with the
- exception is clearly not thread-safe. For a thread-based
- environment, another mechanism must be used (possibly using a
- per-thread allocation mechanism if the size of the area that needs
- to be allocated isn't known at compile time.)
-
- Internally-generated exception regions (cleanups) are marked by
- calling expand_eh_region_start to mark the start of the region,
- and expand_eh_region_end (handler) is used to both designate the
- end of the region and to associate a specified handler/cleanup with
- the region. The rtl code in HANDLER will be invoked whenever an
- exception occurs in the region between the calls to
- expand_eh_region_start and expand_eh_region_end. After HANDLER is
- executed, additional code is emitted to handle rethrowing the
- exception to the outer exception handler. The code for HANDLER will
- be emitted at the end of the function.
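/* An illustrative sketch of the interface just described (hypothetical
   front-end usage, not code from this file; CLEANUP_EXPR stands for
   whatever tree the front end builds for the cleanup):

       expand_eh_region_start ();
         ... expand the statements to be protected ...
       expand_eh_region_end (CLEANUP_EXPR);

   The rtl for CLEANUP_EXPR is emitted at the end of the function and is
   invoked only when an exception occurs within the region.  */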
-
- TARGET_EXPRs can also be used to designate exception regions. A
- TARGET_EXPR gives an unwind-protect style interface commonly used
- in functional languages such as LISP. The associated expression is
- evaluated, and whether or not it (or any of the functions that it
- calls) throws an exception, the protect expression is always
- invoked. This implementation takes care of the details of
- associating an exception table entry with the expression and
- generating the necessary code (it actually emits the protect
- expression twice, once for normal flow and once for the exception
- case). As for the other handlers, the code for the exception case
- will be emitted at the end of the function.
-
- Cleanups can also be specified by using add_partial_entry (handler)
- and end_protect_partials. add_partial_entry creates the start of
- a new exception region; HANDLER will be invoked if an exception is
- thrown with the context of the region between the calls to
- add_partial_entry and end_protect_partials. end_protect_partials is
- used to mark the end of these regions. add_partial_entry can be
- called as many times as needed before calling end_protect_partials.
- However, end_protect_partials should only be invoked once for each
- group of calls to add_partial_entry as the entries are queued
- and all of the outstanding entries are processed simultaneously
- when end_protect_partials is invoked. Similarly to the other
- handlers, the code for HANDLER will be emitted at the end of the
- function.
-
- The generated RTL for an exception region includes
- NOTE_INSN_EH_REGION_BEG and NOTE_INSN_EH_REGION_END notes that mark
- the start and end of the exception region. A unique label is also
- generated at the start of the exception region, which is available
- by looking at the ehstack variable. The topmost entry corresponds
- to the current region.
-
- In the current implementation, an exception can only be thrown from
- a function call (since the mechanism used to actually throw an
- exception involves calling __throw). If an exception region is
- created but no function calls occur within that region, the region
- can be safely optimized away (along with its exception handlers)
- since no exceptions can ever be caught in that region. This
- optimization is performed unless -fasynchronous-exceptions is
- given. If the user wishes to throw from a signal handler, or other
- asynchronous place, -fasynchronous-exceptions should be used when
- compiling for maximally correct code, at the cost of additional
- exception regions. Using -fasynchronous-exceptions only produces
- code that is reasonably safe in such situations, but a correct
- program cannot rely upon this working. It can be used in failsafe
- code, where trying to continue with potentially incorrect results is
- better than halting the program.
-
-
- Walking the stack:
-
- The stack is walked by starting with a pointer to the current
- frame, and finding the pointer to the caller's frame. The unwind info
- tells __throw how to find it.
-
- Unwinding the stack:
-
- When we use the term unwinding the stack, we mean undoing the
- effects of the function prologue in a controlled fashion so that we
- still have the flow of control. Otherwise, we could just return
- (jump to the normal end of function epilogue).
-
- This is done in __throw in libgcc2.c when we know that a handler exists
- in a frame higher up the call stack than its immediate caller.
-
- To unwind, we find the unwind data associated with the frame, if any.
- If we don't find any, we call the library routine __terminate. If we do
- find it, we use the information to copy the saved register values from
- that frame into the register save area in the frame for __throw, return
- into a stub which updates the stack pointer, and jump to the handler.
- The normal function epilogue for __throw handles restoring the saved
- values into registers.
-
- When unwinding, we use this method if we know it will
- work (if DWARF2_UNWIND_INFO is defined). Otherwise, we know that
- an inline unwinder will have been emitted for any function that
- __unwind_function cannot unwind. The inline unwinder appears as a
- normal exception handler covering the entire function. We inform the
- compiler of whether a function can be unwound with
- __unwind_function by having DOESNT_NEED_UNWINDER evaluate to true
- when the unwinder isn't needed. __unwind_function is an action of
- last resort: it is used only if no other method of unwinding is
- available, and if it cannot unwind, it should call __terminate.
-
- By default, if the target-specific backend doesn't supply a definition
- for __unwind_function and doesn't support DWARF2_UNWIND_INFO, inlined
- unwinders will be used instead. The main tradeoff here is in text space
- utilization. Obviously, if inline unwinders have to be generated
- repeatedly, this uses much more space than if a single routine is used.
-
- However, it is simply not possible on some platforms to write a
- generalized routine for doing stack unwinding without having some
- form of additional data associated with each function. The current
- implementation can encode this data in the form of additional
- machine instructions or as static data in tabular form. The latter
- is called the unwind data.
-
- The backend macro DOESNT_NEED_UNWINDER is used to conditionalize whether
- or not per-function unwinders are needed. If DOESNT_NEED_UNWINDER is
- defined and has a non-zero value, a per-function unwinder is not emitted
- for the current function. If the static unwind data is supported, then
- a per-function unwinder is not emitted.
-
- On some platforms it is possible that neither __unwind_function
- nor inlined unwinders are available. For these platforms it is not
- possible to throw through a function call, and abort will be
- invoked instead of performing the throw.
-
- The reason the unwind data may be needed is that on some platforms
- the order and types of data stored on the stack can vary depending
- on the type of function, its arguments and returned values, and the
- compilation options used (optimization versus non-optimization,
- -fomit-frame-pointer, processor variations, etc).
-
- Unfortunately, this also means that throwing through functions that
- aren't compiled with exception handling support will still not be
- possible on some platforms. This problem is currently being
- investigated, but no solutions have been found that do not imply
- some unacceptable performance penalties.
-
- Future directions:
-
- Currently __throw makes no differentiation between cleanups and
- user-defined exception regions. While this makes the implementation
- simple, it also implies that it is impossible to determine if a
- user-defined exception handler exists for a given exception without
- completely unwinding the stack in the process. This is undesirable
- from the standpoint of debugging, as ideally it would be possible
- to trap unhandled exceptions in the debugger before the process of
- unwinding has even started.
-
- This problem can be solved by marking user-defined handlers in a
- special way (probably by adding additional bits to exception_table_list).
- A two-pass scheme could then be used by __throw to iterate
- through the table. The first pass would search for a relevant
- user-defined handler for the current context of the throw, and if
- one is found, the second pass would then invoke all needed cleanups
- before jumping to the user-defined handler.
-
- Many languages (including C++ and Ada) make execution of a
- user-defined handler conditional on the "type" of the exception
- thrown. (The type of the exception is actually the type of the data
- that is thrown with the exception.) It will thus be necessary for
- __throw to be able to determine if a given user-defined
- exception handler will actually be executed, given the type of
- exception.
-
- One scheme is to add additional information to exception_table_list
- as to the types of exceptions accepted by each handler. __throw
- can do the type comparisons and then determine if the handler is
- actually going to be executed.
-
- There is currently no significant level of debugging support
- available, other than to place a breakpoint on __throw. While
- this is sufficient in most cases, it would be helpful to be able to
- know where a given exception was going to be thrown to before it is
- actually thrown, and to be able to choose between stopping before
- every exception region (including cleanups), or just user-defined
- exception regions. This should be possible to do in the two-pass
- scheme by adding additional labels to __throw for appropriate
- breakpoints, and additional debugger commands could be added to
- query various state variables to determine what actions are to be
- performed next.
-
- Another major problem that is being worked on is the issue with stack
- unwinding on various platforms. Currently the only platforms that have
- support for the generation of a generic unwinder are the SPARC and MIPS.
- All other ports require per-function unwinders, which produce large
- amounts of code bloat.
-
- For setjmp/longjmp based exception handling, some of the details
- are as above, but there are some additional details. This section
- discusses the details.
-
- We don't use NOTE_INSN_EH_REGION_{BEG,END} pairs. We don't
- optimize EH regions yet. We don't have to worry about machine
- specific issues with unwinding the stack, as we rely upon longjmp
- for all the machine specific details. There is no variable context
- of a throw, just the one implied by the dynamic handler stack
- pointed to by the dynamic handler chain. There is no exception
- table, and no calls to __register_exceptions. __sjthrow is used
- instead of __throw, and it works by using the dynamic handler
- chain, and longjmp. -fasynchronous-exceptions has no effect, as
- the elimination of trivial exception regions is not yet performed.
-
- A frontend can set protect_cleanup_actions_with_terminate when all
- the cleanup actions should be protected with an EH region that
- calls terminate when an unhandled exception is thrown. C++ does
- this, Ada does not. */
+ [ Add updated documentation on how to use this. ] */
#include "config.h"
-#include "eh-common.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
-#include "except.h"
#include "function.h"
-#include "insn-flags.h"
#include "expr.h"
-#include "insn-codes.h"
-#include "regs.h"
-#include "hard-reg-set.h"
#include "insn-config.h"
-#include "recog.h"
+#include "except.h"
+#include "integrate.h"
+#include "hard-reg-set.h"
+#include "basic-block.h"
#include "output.h"
+#include "dwarf2asm.h"
+#include "dwarf2out.h"
+#include "dwarf2.h"
#include "toplev.h"
+#include "hashtab.h"
#include "intl.h"
-#include "obstack.h"
#include "ggc.h"
#include "tm_p.h"
-/* One to use setjmp/longjmp method of generating code for exception
- handling. */
-int exceptions_via_longjmp = 2;
+/* Provide defaults for stuff that may not be defined when using
+ sjlj exceptions. */
+#ifndef EH_RETURN_STACKADJ_RTX
+#define EH_RETURN_STACKADJ_RTX 0
+#endif
+#ifndef EH_RETURN_HANDLER_RTX
+#define EH_RETURN_HANDLER_RTX 0
+#endif
+#ifndef EH_RETURN_DATA_REGNO
+#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
+#endif
-/* One to enable asynchronous exception support. */
-int asynchronous_exceptions = 0;
+/* Nonzero means enable synchronous exceptions for non-call instructions. */
+int flag_non_call_exceptions;
-/* One to protect cleanup actions with a handler that calls
- __terminate, zero otherwise. */
+/* Protect cleanup actions with must-not-throw regions, with a call
+ to the given failure handler. */
+tree (*lang_protect_cleanup_actions) PARAMS ((void));
-int protect_cleanup_actions_with_terminate;
+/* Return true if type A catches type B. */
+int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
-/* A list of labels used for exception handlers. Created by
- find_exception_handler_labels for the optimization passes. */
+/* Map a type to a runtime object to match type. */
+tree (*lang_eh_runtime_type) PARAMS ((tree));
+/* A list of labels used for exception handlers. */
rtx exception_handler_labels;
-/* Keeps track of the label used as the context of a throw to rethrow an
- exception to the outer exception region. */
-
-struct label_node *outer_context_label_stack = NULL;
-
-/* Pseudos used to hold exception return data in the interim between
- __builtin_eh_return and the end of the function. */
+static int call_site_base;
+static int sjlj_funcdef_number;
+static htab_t type_to_runtime_map;
+
+/* Describe the SjLj_Function_Context structure. */
+static tree sjlj_fc_type_node;
+static int sjlj_fc_call_site_ofs;
+static int sjlj_fc_data_ofs;
+static int sjlj_fc_personality_ofs;
+static int sjlj_fc_lsda_ofs;
+static int sjlj_fc_jbuf_ofs;
+
+/* Describes one exception region. */
+struct eh_region
+{
+ /* The immediately surrounding region. */
+ struct eh_region *outer;
-static rtx eh_return_context;
-static rtx eh_return_stack_adjust;
-static rtx eh_return_handler;
+ /* The list of immediately contained regions. */
+ struct eh_region *inner;
+ struct eh_region *next_peer;
-/* This is used for targets which can call rethrow with an offset instead
- of an address. This is subtracted from the rethrow label we are
- interested in. */
+ /* An identifier for this region. */
+ int region_number;
-static rtx first_rethrow_symbol = NULL_RTX;
-static rtx final_rethrow = NULL_RTX;
-static rtx last_rethrow_symbol = NULL_RTX;
+ /* Each region does exactly one thing. */
+ enum eh_region_type
+ {
+ ERT_CLEANUP = 1,
+ ERT_TRY,
+ ERT_CATCH,
+ ERT_ALLOWED_EXCEPTIONS,
+ ERT_MUST_NOT_THROW,
+ ERT_THROW,
+ ERT_FIXUP
+ } type;
+
+ /* Holds the action to perform based on the preceding type. */
+ union {
+ /* A list of catch blocks, a surrounding try block,
+ and the label for continuing after a catch. */
+ struct {
+ struct eh_region *catch;
+ struct eh_region *last_catch;
+ struct eh_region *prev_try;
+ rtx continue_label;
+ } try;
+
+ /* The list through the catch handlers, the type object
+ matched, and a pointer to the generated code. */
+ struct {
+ struct eh_region *next_catch;
+ struct eh_region *prev_catch;
+ tree type;
+ int filter;
+ } catch;
+
+ /* A tree_list of allowed types. */
+ struct {
+ tree type_list;
+ int filter;
+ } allowed;
+
+ /* The type given by a call to "throw foo();", or discovered
+ for a throw. */
+ struct {
+ tree type;
+ } throw;
+
+ /* Retain the cleanup expression even after expansion so that
+ we can match up fixup regions. */
+ struct {
+ tree exp;
+ } cleanup;
+
+ /* The real region (by expression and by pointer) that fixup code
+ should live in. */
+ struct {
+ tree cleanup_exp;
+ struct eh_region *real_region;
+ } fixup;
+ } u;
+
+ /* Entry point for this region's handler before landing pads are built. */
+ rtx label;
+ /* Entry point for this region's handler from the runtime eh library. */
+ rtx landing_pad;
-/* Prototypes for local functions. */
+ /* Entry point for this region's handler from an inner region. */
+ rtx post_landing_pad;
-static void push_eh_entry PARAMS ((struct eh_stack *));
-static struct eh_entry * pop_eh_entry PARAMS ((struct eh_stack *));
-static void enqueue_eh_entry PARAMS ((struct eh_queue *, struct eh_entry *));
-static struct eh_entry * dequeue_eh_entry PARAMS ((struct eh_queue *));
-static rtx call_get_eh_context PARAMS ((void));
-static void start_dynamic_cleanup PARAMS ((tree, tree));
-static void start_dynamic_handler PARAMS ((void));
-static void expand_rethrow PARAMS ((rtx));
-static void output_exception_table_entry PARAMS ((FILE *, int));
-static rtx scan_region PARAMS ((rtx, int, int *));
-static void eh_regs PARAMS ((rtx *, rtx *, rtx *, int));
-static void set_insn_eh_region PARAMS ((rtx *, int));
-#ifdef DONT_USE_BUILTIN_SETJMP
-static void jumpif_rtx PARAMS ((rtx, rtx));
-#endif
-static void find_exception_handler_labels_1 PARAMS ((rtx));
-static void mark_eh_node PARAMS ((struct eh_node *));
-static void mark_eh_stack PARAMS ((struct eh_stack *));
-static void mark_eh_queue PARAMS ((struct eh_queue *));
-static void mark_tree_label_node PARAMS ((struct label_node *));
-static void mark_func_eh_entry PARAMS ((void *));
-static rtx create_rethrow_ref PARAMS ((int));
-static void push_entry PARAMS ((struct eh_stack *, struct eh_entry*));
-static void receive_exception_label PARAMS ((rtx));
-static int new_eh_region_entry PARAMS ((int, rtx));
-static int find_func_region PARAMS ((int));
-static int find_func_region_from_symbol PARAMS ((rtx));
-static void clear_function_eh_region PARAMS ((void));
-static void process_nestinfo PARAMS ((int, eh_nesting_info *, int *));
-rtx expand_builtin_return_addr PARAMS ((enum built_in_function, int, rtx));
-static void emit_cleanup_handler PARAMS ((struct eh_entry *));
-static int eh_region_from_symbol PARAMS ((rtx));
+ /* The RESX insn for handing off control to the next outermost handler,
+ if appropriate. */
+ rtx resume;
+};
-
-/* Various support routines to manipulate the various data structures
- used by the exception handling code. */
+/* Used to save exception status for each function. */
+struct eh_status
+{
+ /* The tree of all regions for this function. */
+ struct eh_region *region_tree;
-extern struct obstack permanent_obstack;
+ /* The same information as an indexable array. */
+ struct eh_region **region_array;
-/* Generate a SYMBOL_REF for rethrow to use */
+ /* The most recently open region. */
+ struct eh_region *cur_region;
-static rtx
-create_rethrow_ref (region_num)
- int region_num;
-{
- rtx def;
- const char *ptr;
- char buf[60];
+ /* This is the region for which we are processing catch blocks. */
+ struct eh_region *try_region;
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", region_num);
- ptr = ggc_strdup (buf);
- def = gen_rtx_SYMBOL_REF (Pmode, ptr);
- SYMBOL_REF_NEED_ADJUST (def) = 1;
+ /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
+ node is itself a TREE_CHAINed list of handlers for regions that
+ are not yet closed. The TREE_VALUE of each entry contains the
+ handler for the corresponding entry on the ehstack. */
+ tree protect_list;
- return def;
-}
+ rtx filter;
+ rtx exc_ptr;
-/* Push a label entry onto the given STACK. */
+ int built_landing_pads;
+ int last_region_number;
-void
-push_label_entry (stack, rlabel, tlabel)
- struct label_node **stack;
- rtx rlabel;
- tree tlabel;
-{
- struct label_node *newnode
- = (struct label_node *) xmalloc (sizeof (struct label_node));
+ varray_type ttype_data;
+ varray_type ehspec_data;
+ varray_type action_record_data;
- if (rlabel)
- newnode->u.rlabel = rlabel;
- else
- newnode->u.tlabel = tlabel;
- newnode->chain = *stack;
- *stack = newnode;
-}
+ struct call_site_record
+ {
+ rtx landing_pad;
+ int action;
+ } *call_site_data;
+ int call_site_data_used;
+ int call_site_data_size;
+
+ rtx ehr_stackadj;
+ rtx ehr_handler;
+ rtx ehr_label;
+
+ rtx sjlj_fc;
+ rtx sjlj_exit_after;
+};
-/* Pop a label entry from the given STACK. */
+
+static void mark_eh_region PARAMS ((struct eh_region *));
+
+static int t2r_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t t2r_hash PARAMS ((const PTR));
+static int t2r_mark_1 PARAMS ((PTR *, PTR));
+static void t2r_mark PARAMS ((PTR));
+static void add_type_for_runtime PARAMS ((tree));
+static tree lookup_type_for_runtime PARAMS ((tree));
+
+static struct eh_region *expand_eh_region_end PARAMS ((void));
+
+static rtx get_exception_filter PARAMS ((struct function *));
+
+static void collect_eh_region_array PARAMS ((void));
+static void resolve_fixup_regions PARAMS ((void));
+static void remove_fixup_regions PARAMS ((void));
+static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
+
+static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
+ struct inline_remap *));
+static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
+ struct eh_region **));
+static int ttypes_filter_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t ttypes_filter_hash PARAMS ((const PTR));
+static int ehspec_filter_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t ehspec_filter_hash PARAMS ((const PTR));
+static int add_ttypes_entry PARAMS ((htab_t, tree));
+static int add_ehspec_entry PARAMS ((htab_t, htab_t,
+ tree));
+static void assign_filter_values PARAMS ((void));
+static void build_post_landing_pads PARAMS ((void));
+static void connect_post_landing_pads PARAMS ((void));
+static void dw2_build_landing_pads PARAMS ((void));
+
+struct sjlj_lp_info;
+static bool sjlj_find_directly_reachable_regions
+ PARAMS ((struct sjlj_lp_info *));
+static void sjlj_assign_call_site_values
+ PARAMS ((rtx, struct sjlj_lp_info *));
+static void sjlj_mark_call_sites
+ PARAMS ((struct sjlj_lp_info *));
+static void sjlj_emit_function_enter PARAMS ((rtx));
+static void sjlj_emit_function_exit PARAMS ((void));
+static void sjlj_emit_dispatch_table
+ PARAMS ((rtx, struct sjlj_lp_info *));
+static void sjlj_build_landing_pads PARAMS ((void));
+
+static void remove_exception_handler_label PARAMS ((rtx));
+static void remove_eh_handler PARAMS ((struct eh_region *));
+
+struct reachable_info;
+
+/* The return value of reachable_next_level. */
+enum reachable_code
+{
+ /* The given exception is not processed by the given region. */
+ RNL_NOT_CAUGHT,
+ /* The given exception may need processing by the given region. */
+ RNL_MAYBE_CAUGHT,
+ /* The given exception is completely processed by the given region. */
+ RNL_CAUGHT,
+ /* The given exception is completely processed by the runtime. */
+ RNL_BLOCKED
+};
-rtx
-pop_label_entry (stack)
- struct label_node **stack;
-{
- rtx label;
- struct label_node *tempnode;
+static int check_handled PARAMS ((tree, tree));
+static void add_reachable_handler
+ PARAMS ((struct reachable_info *, struct eh_region *,
+ struct eh_region *));
+static enum reachable_code reachable_next_level
+ PARAMS ((struct eh_region *, tree, struct reachable_info *));
+
+static int action_record_eq PARAMS ((const PTR,
+ const PTR));
+static hashval_t action_record_hash PARAMS ((const PTR));
+static int add_action_record PARAMS ((htab_t, int, int));
+static int collect_one_action_chain PARAMS ((htab_t,
+ struct eh_region *));
+static int add_call_site PARAMS ((rtx, int));
+
+static void push_uleb128 PARAMS ((varray_type *,
+ unsigned int));
+static void push_sleb128 PARAMS ((varray_type *, int));
+#ifndef HAVE_AS_LEB128
+static int dw2_size_of_call_site_table PARAMS ((void));
+static int sjlj_size_of_call_site_table PARAMS ((void));
+#endif
+static void dw2_output_call_site_table PARAMS ((void));
+static void sjlj_output_call_site_table PARAMS ((void));
- if (! *stack)
- return NULL_RTX;
+
+/* Routine to see if exception handling is turned on.
+ DO_WARN is non-zero if we want to inform the user that exception
+ handling is turned off.
- tempnode = *stack;
- label = tempnode->u.rlabel;
- *stack = (*stack)->chain;
- free (tempnode);
+ This is used to ensure that -fexceptions has been specified if the
+ compiler tries to use any exception-specific functions. */
- return label;
+int
+doing_eh (do_warn)
+ int do_warn;
+{
+ if (! flag_exceptions)
+ {
+ static int warned = 0;
+ if (! warned && do_warn)
+ {
+ error ("exception handling disabled, use -fexceptions to enable");
+ warned = 1;
+ }
+ return 0;
+ }
+ return 1;
}
-/* Return the top element of the given STACK. */
-
-tree
-top_label_entry (stack)
- struct label_node **stack;
+
+void
+init_eh ()
{
- if (! *stack)
- return NULL_TREE;
+ ggc_add_rtx_root (&exception_handler_labels, 1);
- return (*stack)->u.tlabel;
-}
+ if (! flag_exceptions)
+ return;
-/* Get an exception label. */
+ type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
+ ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
-rtx
-gen_exception_label ()
-{
- rtx lab;
- lab = gen_label_rtx ();
- return lab;
-}
+ /* Create the SjLj_Function_Context structure. This should match
+ the definition in unwind-sjlj.c. */
+ if (USING_SJLJ_EXCEPTIONS)
+ {
+ tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
-/* Push a new eh_node entry onto STACK. */
+ sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
+ ggc_add_tree_root (&sjlj_fc_type_node, 1);
-static void
-push_eh_entry (stack)
- struct eh_stack *stack;
-{
- struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
- struct eh_entry *entry = (struct eh_entry *) xmalloc (sizeof (struct eh_entry));
-
- rtx rlab = gen_exception_label ();
- entry->finalization = NULL_TREE;
- entry->label_used = 0;
- entry->exception_handler_label = rlab;
- entry->false_label = NULL_RTX;
- if (! flag_new_exceptions)
- entry->outer_context = gen_label_rtx ();
- else
- entry->outer_context = create_rethrow_ref (CODE_LABEL_NUMBER (rlab));
- entry->rethrow_label = entry->outer_context;
- entry->goto_entry_p = 0;
+ f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
+ build_pointer_type (sjlj_fc_type_node));
+ DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
- node->entry = entry;
- node->chain = stack->top;
- stack->top = node;
-}
+ f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
+ integer_type_node);
+ DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
-/* Push an existing entry onto a stack. */
+ tmp = build_index_type (build_int_2 (4 - 1, 0));
+ tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
+ f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
+ DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
-static void
-push_entry (stack, entry)
- struct eh_stack *stack;
- struct eh_entry *entry;
-{
- struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
- node->entry = entry;
- node->chain = stack->top;
- stack->top = node;
-}
+ f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
+ ptr_type_node);
+ DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
-/* Pop an entry from the given STACK. */
+ f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
+ ptr_type_node);
+ DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
-static struct eh_entry *
-pop_eh_entry (stack)
- struct eh_stack *stack;
-{
- struct eh_node *tempnode;
- struct eh_entry *tempentry;
-
- tempnode = stack->top;
- tempentry = tempnode->entry;
- stack->top = stack->top->chain;
- free (tempnode);
+#ifdef DONT_USE_BUILTIN_SETJMP
+#ifdef JMP_BUF_SIZE
+ tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
+#else
+ /* Should be large enough for most systems; if it is not,
+ JMP_BUF_SIZE should be defined with the proper value. It will
+ also tend to be larger than necessary for most systems; a more
+ optimal port will define JMP_BUF_SIZE. */
+ tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
+#endif
+#else
+ /* This is 2 for builtin_setjmp, plus whatever the target requires
+ via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
+ tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
+ / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
+#endif
+ tmp = build_index_type (tmp);
+ tmp = build_array_type (ptr_type_node, tmp);
+ f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
+#ifdef DONT_USE_BUILTIN_SETJMP
+ /* We don't know what alignment requirements the runtime's
+ jmp_buf has. Overestimate. */
+ DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
+ DECL_USER_ALIGN (f_jbuf) = 1;
+#endif
+ DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
+
+ TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
+ TREE_CHAIN (f_prev) = f_cs;
+ TREE_CHAIN (f_cs) = f_data;
+ TREE_CHAIN (f_data) = f_per;
+ TREE_CHAIN (f_per) = f_lsda;
+ TREE_CHAIN (f_lsda) = f_jbuf;
+
+ layout_type (sjlj_fc_type_node);
+
+ /* Cache the interesting field offsets so that we have
+ easy access from rtl. */
+ sjlj_fc_call_site_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
+ sjlj_fc_data_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
+ sjlj_fc_personality_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
+ sjlj_fc_lsda_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
+ sjlj_fc_jbuf_ofs
+ = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
+ + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
+ }
+}
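/* For reference, a minimal sketch of the runtime structure that the
   lang type built above is intended to mirror.  This is an assumption
   for illustration, not copied from unwind-sjlj.c; the field order
   follows the TREE_CHAINing of f_prev, f_cs, f_data, f_per, f_lsda
   and f_jbuf, and the jmp_buf length is target dependent:

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;     (__prev)
         int call_site;                          (__call_site)
         unsigned long data[4];                  (__data, one word each)
         void *personality;                      (__personality)
         void *lsda;                             (__lsda)
         void *jbuf[];                           (__jbuf, length varies)
       };  */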
- return tempentry;
+void
+init_eh_for_function ()
+{
+ cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
-/* Enqueue an ENTRY onto the given QUEUE. */
+/* Mark EH for GC. */
static void
-enqueue_eh_entry (queue, entry)
- struct eh_queue *queue;
- struct eh_entry *entry;
+mark_eh_region (region)
+ struct eh_region *region;
{
- struct eh_node *node = (struct eh_node *) xmalloc (sizeof (struct eh_node));
+ if (! region)
+ return;
- node->entry = entry;
- node->chain = NULL;
+ switch (region->type)
+ {
+ case ERT_CLEANUP:
+ ggc_mark_tree (region->u.cleanup.exp);
+ break;
+ case ERT_TRY:
+ ggc_mark_rtx (region->u.try.continue_label);
+ break;
+ case ERT_CATCH:
+ ggc_mark_tree (region->u.catch.type);
+ break;
+ case ERT_ALLOWED_EXCEPTIONS:
+ ggc_mark_tree (region->u.allowed.type_list);
+ break;
+ case ERT_MUST_NOT_THROW:
+ break;
+ case ERT_THROW:
+ ggc_mark_tree (region->u.throw.type);
+ break;
+ case ERT_FIXUP:
+ ggc_mark_tree (region->u.fixup.cleanup_exp);
+ break;
+ default:
+ abort ();
+ }
- if (queue->head == NULL)
- queue->head = node;
- else
- queue->tail->chain = node;
- queue->tail = node;
+ ggc_mark_rtx (region->label);
+ ggc_mark_rtx (region->resume);
+ ggc_mark_rtx (region->landing_pad);
+ ggc_mark_rtx (region->post_landing_pad);
}
-/* Dequeue an entry from the given QUEUE. */
-
-static struct eh_entry *
-dequeue_eh_entry (queue)
- struct eh_queue *queue;
+void
+mark_eh_status (eh)
+ struct eh_status *eh;
{
- struct eh_node *tempnode;
- struct eh_entry *tempentry;
+ int i;
- if (queue->head == NULL)
- return NULL;
+ if (eh == 0)
+ return;
- tempnode = queue->head;
- queue->head = queue->head->chain;
+ /* If we've called collect_eh_region_array, use it. Otherwise walk
+ the tree non-recursively. */
+ if (eh->region_array)
+ {
+ for (i = eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *r = eh->region_array[i];
+ if (r && r->region_number == i)
+ mark_eh_region (r);
+ }
+ }
+ else if (eh->region_tree)
+ {
+ struct eh_region *r = eh->region_tree;
+ while (1)
+ {
+ mark_eh_region (r);
+ if (r->inner)
+ r = r->inner;
+ else if (r->next_peer)
+ r = r->next_peer;
+ else
+ {
+ do {
+ r = r->outer;
+ if (r == NULL)
+ goto tree_done;
+ } while (r->next_peer == NULL);
+ r = r->next_peer;
+ }
+ }
+ tree_done:;
+ }
+
+ ggc_mark_tree (eh->protect_list);
+ ggc_mark_rtx (eh->filter);
+ ggc_mark_rtx (eh->exc_ptr);
+ ggc_mark_tree_varray (eh->ttype_data);
- tempentry = tempnode->entry;
- free (tempnode);
+ if (eh->call_site_data)
+ {
+ for (i = eh->call_site_data_used - 1; i >= 0; --i)
+ ggc_mark_rtx (eh->call_site_data[i].landing_pad);
+ }
+
+ ggc_mark_rtx (eh->ehr_stackadj);
+ ggc_mark_rtx (eh->ehr_handler);
+ ggc_mark_rtx (eh->ehr_label);
- return tempentry;
+ ggc_mark_rtx (eh->sjlj_fc);
+ ggc_mark_rtx (eh->sjlj_exit_after);
}
-static void
-receive_exception_label (handler_label)
- rtx handler_label;
+void
+free_eh_status (f)
+ struct function *f;
{
- rtx around_label = NULL_RTX;
+ struct eh_status *eh = f->eh;
- if (! flag_new_exceptions || exceptions_via_longjmp)
+ if (eh->region_array)
{
- around_label = gen_label_rtx ();
- emit_jump (around_label);
- emit_barrier ();
+ int i;
+ for (i = eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *r = eh->region_array[i];
+ /* Mind we don't free a region struct more than once. */
+ if (r && r->region_number == i)
+ free (r);
+ }
+ free (eh->region_array);
+ }
+ else if (eh->region_tree)
+ {
+ struct eh_region *next, *r = eh->region_tree;
+ while (1)
+ {
+ if (r->inner)
+ r = r->inner;
+ else if (r->next_peer)
+ {
+ next = r->next_peer;
+ free (r);
+ r = next;
+ }
+ else
+ {
+ do {
+ next = r->outer;
+ free (r);
+ r = next;
+ if (r == NULL)
+ goto tree_done;
+ } while (r->next_peer == NULL);
+ next = r->next_peer;
+ free (r);
+ r = next;
+ }
+ }
+ tree_done:;
}
- emit_label (handler_label);
-
- if (! exceptions_via_longjmp)
+ VARRAY_FREE (eh->ttype_data);
+ VARRAY_FREE (eh->ehspec_data);
+ VARRAY_FREE (eh->action_record_data);
+ if (eh->call_site_data)
+ free (eh->call_site_data);
+
+ free (eh);
+ f->eh = NULL;
+}
+
+
+/* Start an exception handling region. All instructions emitted
+ after this point are considered to be part of the region until
+ expand_eh_region_end is invoked. */
+
+void
+expand_eh_region_start ()
+{
+ struct eh_region *new_region;
+ struct eh_region *cur_region;
+ rtx note;
+
+ if (! doing_eh (0))
+ return;
+
+ /* Insert a new blank region as a leaf in the tree. */
+ new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
+ cur_region = cfun->eh->cur_region;
+ new_region->outer = cur_region;
+ if (cur_region)
{
-#ifdef HAVE_exception_receiver
- if (HAVE_exception_receiver)
- emit_insn (gen_exception_receiver ());
- else
-#endif
-#ifdef HAVE_nonlocal_goto_receiver
- if (HAVE_nonlocal_goto_receiver)
- emit_insn (gen_nonlocal_goto_receiver ());
- else
-#endif
- { /* Nothing */ }
+ new_region->next_peer = cur_region->inner;
+ cur_region->inner = new_region;
}
else
{
-#ifndef DONT_USE_BUILTIN_SETJMP
- expand_builtin_setjmp_receiver (handler_label);
-#endif
+ new_region->next_peer = cfun->eh->region_tree;
+ cfun->eh->region_tree = new_region;
}
+ cfun->eh->cur_region = new_region;
- if (around_label)
- emit_label (around_label);
+ /* Create a note marking the start of this region. */
+ new_region->region_number = ++cfun->eh->last_region_number;
+ note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
+ NOTE_EH_HANDLER (note) = new_region->region_number;
}
+/* Common code to end a region. Returns the region just ended. */
-struct func_eh_entry
+static struct eh_region *
+expand_eh_region_end ()
{
- int range_number; /* EH region number from EH NOTE insns. */
- rtx rethrow_label; /* Label for rethrow. */
- int rethrow_ref; /* Is rethrow_label referenced? */
- int emitted; /* 1 if this entry has been emitted in assembly file. */
- struct handler_info *handlers;
-};
+ struct eh_region *cur_region = cfun->eh->cur_region;
+ rtx note;
+ /* Create a note marking the end of this region. */
+ note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
+ NOTE_EH_HANDLER (note) = cur_region->region_number;
-/* table of function eh regions */
-static struct func_eh_entry *function_eh_regions = NULL;
-static int num_func_eh_entries = 0;
-static int current_func_eh_entry = 0;
+ /* Pop. */
+ cfun->eh->cur_region = cur_region->outer;
-#define SIZE_FUNC_EH(X) (sizeof (struct func_eh_entry) * X)
+ return cur_region;
+}
-/* Add a new eh_entry for this function. The number returned is a
- number which uniquely identifies this exception range. */
+/* End an exception handling region for a cleanup. HANDLER is an
+ expression to expand for the cleanup. */
-static int
-new_eh_region_entry (note_eh_region, rethrow)
- int note_eh_region;
- rtx rethrow;
+void
+expand_eh_region_end_cleanup (handler)
+ tree handler;
{
- if (current_func_eh_entry == num_func_eh_entries)
- {
- if (num_func_eh_entries == 0)
- {
- function_eh_regions =
- (struct func_eh_entry *) xmalloc (SIZE_FUNC_EH (50));
- num_func_eh_entries = 50;
- }
- else
- {
- num_func_eh_entries = num_func_eh_entries * 3 / 2;
- function_eh_regions = (struct func_eh_entry *)
- xrealloc (function_eh_regions, SIZE_FUNC_EH (num_func_eh_entries));
- }
- }
- function_eh_regions[current_func_eh_entry].range_number = note_eh_region;
- if (rethrow == NULL_RTX)
- function_eh_regions[current_func_eh_entry].rethrow_label =
- create_rethrow_ref (note_eh_region);
- else
- function_eh_regions[current_func_eh_entry].rethrow_label = rethrow;
- function_eh_regions[current_func_eh_entry].handlers = NULL;
- function_eh_regions[current_func_eh_entry].emitted = 0;
-
- return current_func_eh_entry++;
-}
-
-/* Add new handler information to an exception range. The first parameter
- specifies the range number (returned from new_eh_region_entry ()). The second
- parameter specifies the handler. By default the handler is inserted at
- the end of the list. A handler list may contain only ONE NULL_TREE
- typeinfo entry. Regardless of where it is positioned, a NULL_TREE entry
- is always output as the LAST handler in the exception table for a region. */
-
-void
-add_new_handler (region, newhandler)
- int region;
- struct handler_info *newhandler;
-{
- struct handler_info *last;
-
- /* If find_func_region returns -1, callers might attempt to pass us
- this region number. If that happens, something has gone wrong;
- -1 is never a valid region. */
- if (region == -1)
- abort ();
+ struct eh_region *region;
+ tree protect_cleanup_actions;
+ rtx around_label;
+ rtx data_save[2];
- newhandler->next = NULL;
- last = function_eh_regions[region].handlers;
- if (last == NULL)
- function_eh_regions[region].handlers = newhandler;
- else
- {
- for ( ; ; last = last->next)
- {
- if (last->type_info == CATCH_ALL_TYPE)
- pedwarn ("additional handler after ...");
- if (last->next == NULL)
- break;
- }
- last->next = newhandler;
- }
-}
+ if (! doing_eh (0))
+ return;
-/* Remove a handler label. The handler label is being deleted, so all
- regions which reference this handler should have it removed from their
- list of possible handlers. Any region which has the final handler
- removed can be deleted. */
+ region = expand_eh_region_end ();
+ region->type = ERT_CLEANUP;
+ region->label = gen_label_rtx ();
+ region->u.cleanup.exp = handler;
-void remove_handler (removing_label)
- rtx removing_label;
-{
- struct handler_info *handler, *last;
- int x;
- for (x = 0 ; x < current_func_eh_entry; ++x)
- {
- last = NULL;
- handler = function_eh_regions[x].handlers;
- for ( ; handler; last = handler, handler = handler->next)
- if (handler->handler_label == removing_label)
- {
- if (last)
- {
- last->next = handler->next;
- handler = last;
- }
- else
- function_eh_regions[x].handlers = handler->next;
- }
- }
-}
+ around_label = gen_label_rtx ();
+ emit_jump (around_label);
-/* This function will return a malloc'd pointer to an array of
- void pointers representing the runtime match values that
- currently exist in all regions. */
+ emit_label (region->label);
-int
-find_all_handler_type_matches (array)
- void ***array;
-{
- struct handler_info *handler, *last;
- int x,y;
- void *val;
- void **ptr;
- int max_ptr;
- int n_ptr = 0;
+ /* Give the language a chance to specify an action to be taken if an
+ exception is thrown that would propagate out of the HANDLER. */
+ protect_cleanup_actions
+ = (lang_protect_cleanup_actions
+ ? (*lang_protect_cleanup_actions) ()
+ : NULL_TREE);
- *array = NULL;
+ if (protect_cleanup_actions)
+ expand_eh_region_start ();
- if (!doing_eh (0) || ! flag_new_exceptions)
- return 0;
+ /* In case this cleanup involves an inline destructor with a try block in
+ it, we need to save the EH return data registers around it. */
+ data_save[0] = gen_reg_rtx (Pmode);
+ emit_move_insn (data_save[0], get_exception_pointer (cfun));
+ data_save[1] = gen_reg_rtx (word_mode);
+ emit_move_insn (data_save[1], get_exception_filter (cfun));
- max_ptr = 100;
- ptr = (void **) xmalloc (max_ptr * sizeof (void *));
+ expand_expr (handler, const0_rtx, VOIDmode, 0);
- for (x = 0 ; x < current_func_eh_entry; x++)
- {
- last = NULL;
- handler = function_eh_regions[x].handlers;
- for ( ; handler; last = handler, handler = handler->next)
- {
- val = handler->type_info;
- if (val != NULL && val != CATCH_ALL_TYPE)
- {
- /* See if this match value has already been found. */
- for (y = 0; y < n_ptr; y++)
- if (ptr[y] == val)
- break;
+ emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
+ emit_move_insn (cfun->eh->filter, data_save[1]);
- /* If we break early, we already found this value. */
- if (y < n_ptr)
- continue;
+ if (protect_cleanup_actions)
+ expand_eh_region_end_must_not_throw (protect_cleanup_actions);
- /* Do we need to allocate more space? */
- if (n_ptr >= max_ptr)
- {
- max_ptr += max_ptr / 2;
- ptr = (void **) xrealloc (ptr, max_ptr * sizeof (void *));
- }
- ptr[n_ptr] = val;
- n_ptr++;
- }
- }
- }
+ /* We need any stack adjustment complete before the around_label. */
+ do_pending_stack_adjust ();
- if (n_ptr == 0)
- {
- free (ptr);
- ptr = NULL;
- }
- *array = ptr;
- return n_ptr;
+ /* We delay the generation of the _Unwind_Resume until we generate
+ landing pads. We emit a marker here so as to get good control
+ flow data in the meantime. */
+ region->resume
+ = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
+ emit_barrier ();
+
+ emit_label (around_label);
}
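/* A minimal usage sketch (hypothetical front-end code, not part of this
   file); CLEANUP_EXP names whatever tree the language builds for the
   cleanup action:

       expand_eh_region_start ();
         ... expand the statements protected by the cleanup ...
       expand_eh_region_end_cleanup (cleanup_exp);  */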
-/* Create a new handler structure initialized with the handler label and
- typeinfo fields passed in. */
+/* End an exception handling region for a try block, and prepare
+ for subsequent calls to expand_start_catch. */
-struct handler_info *
-get_new_handler (handler, typeinfo)
- rtx handler;
- void *typeinfo;
+void
+expand_start_all_catch ()
{
- struct handler_info* ptr;
- ptr = (struct handler_info *) xmalloc (sizeof (struct handler_info));
- ptr->handler_label = handler;
- ptr->handler_number = CODE_LABEL_NUMBER (handler);
- ptr->type_info = typeinfo;
- ptr->next = NULL;
+ struct eh_region *region;
- return ptr;
-}
+ if (! doing_eh (1))
+ return;
+ region = expand_eh_region_end ();
+ region->type = ERT_TRY;
+ region->u.try.prev_try = cfun->eh->try_region;
+ region->u.try.continue_label = gen_label_rtx ();
+ cfun->eh->try_region = region;
+
+ emit_jump (region->u.try.continue_label);
+}
-/* Find the index in function_eh_regions associated with a NOTE region. If
- the region cannot be found, -1 is returned. */
+/* Begin a catch clause. TYPE is the type caught, or null if this is
+ a catch-all clause. */
-static int
-find_func_region (insn_region)
- int insn_region;
+void
+expand_start_catch (type)
+ tree type;
{
- int x;
- for (x = 0; x < current_func_eh_entry; x++)
- if (function_eh_regions[x].range_number == insn_region)
- return x;
+ struct eh_region *t, *c, *l;
+
+ if (! doing_eh (0))
+ return;
+
+ if (type)
+ add_type_for_runtime (type);
+ expand_eh_region_start ();
+
+ t = cfun->eh->try_region;
+ c = cfun->eh->cur_region;
+ c->type = ERT_CATCH;
+ c->u.catch.type = type;
+ c->label = gen_label_rtx ();
+
+ l = t->u.try.last_catch;
+ c->u.catch.prev_catch = l;
+ if (l)
+ l->u.catch.next_catch = c;
+ else
+ t->u.try.catch = c;
+ t->u.try.last_catch = c;
- return -1;
+ emit_label (c->label);
}
-/* Get a pointer to the first handler in an exception region's list. */
+/* End a catch clause. Control will resume after the try/catch block. */
-struct handler_info *
-get_first_handler (region)
- int region;
+void
+expand_end_catch ()
{
- int r = find_func_region (region);
- if (r == -1)
- abort ();
- return function_eh_regions[r].handlers;
+ struct eh_region *try_region, *catch_region;
+
+ if (! doing_eh (0))
+ return;
+
+ catch_region = expand_eh_region_end ();
+ try_region = cfun->eh->try_region;
+
+ emit_jump (try_region->u.try.continue_label);
}
-/* Clean out the function_eh_region table and free all memory */
+/* End a sequence of catch handlers for a try block. */
-static void
-clear_function_eh_region ()
+void
+expand_end_all_catch ()
{
- int x;
- struct handler_info *ptr, *next;
- for (x = 0; x < current_func_eh_entry; x++)
- for (ptr = function_eh_regions[x].handlers; ptr != NULL; ptr = next)
- {
- next = ptr->next;
- free (ptr);
- }
- if (function_eh_regions)
- free (function_eh_regions);
- num_func_eh_entries = 0;
- current_func_eh_entry = 0;
+ struct eh_region *try_region;
+
+ if (! doing_eh (0))
+ return;
+
+ try_region = cfun->eh->try_region;
+ cfun->eh->try_region = try_region->u.try.prev_try;
+
+ emit_label (try_region->u.try.continue_label);
}
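/* Taken together, a front end expanding a try/catch statement is
   expected to call this family of routines in the following order
   (a sketch; TYPE stands for the language's representation of the
   caught type, or NULL_TREE for a catch-all):

       expand_eh_region_start ();
         ... expand the try-block body ...
       expand_start_all_catch ();
         expand_start_catch (type);
           ... expand this handler's body ...
         expand_end_catch ();
         ... further handlers, each bracketed the same way ...
       expand_end_all_catch ();

   Execution resumes after expand_end_all_catch at the try region's
   continue_label.  */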
-/* Make a duplicate of an exception region by copying all the handlers
- for an exception region. Return the new handler index. The final
- parameter is a routine which maps old labels to new ones. */
+/* End an exception region for an exception type filter. ALLOWED is a
+ TREE_LIST of types to be matched by the runtime. FAILURE is an
+ expression to invoke if a mismatch occurs. */
-int
-duplicate_eh_handlers (old_note_eh_region, new_note_eh_region, map)
- int old_note_eh_region, new_note_eh_region;
- rtx (*map) PARAMS ((rtx));
+void
+expand_eh_region_end_allowed (allowed, failure)
+ tree allowed, failure;
{
- struct handler_info *ptr, *new_ptr;
- int new_region, region;
+ struct eh_region *region;
+ rtx around_label;
- region = find_func_region (old_note_eh_region);
- if (region == -1)
- /* Cannot duplicate non-existent exception region. */
- abort ();
+ if (! doing_eh (0))
+ return;
- /* duplicate_eh_handlers may have been called during a symbol remap. */
- new_region = find_func_region (new_note_eh_region);
- if (new_region != -1)
- return (new_region);
+ region = expand_eh_region_end ();
+ region->type = ERT_ALLOWED_EXCEPTIONS;
+ region->u.allowed.type_list = allowed;
+ region->label = gen_label_rtx ();
- new_region = new_eh_region_entry (new_note_eh_region, NULL_RTX);
+ for (; allowed ; allowed = TREE_CHAIN (allowed))
+ add_type_for_runtime (TREE_VALUE (allowed));
- ptr = function_eh_regions[region].handlers;
+ /* We must emit the call to FAILURE here, so that if this function
+ throws a different exception, it will be processed by the
+ correct region. */
- for ( ; ptr; ptr = ptr->next)
- {
- new_ptr = get_new_handler (map (ptr->handler_label), ptr->type_info);
- add_new_handler (new_region, new_ptr);
- }
+ /* If there are any pending stack adjustments, we must emit them
+ before we branch -- otherwise, we won't know how much adjustment
+ is required later. */
+ do_pending_stack_adjust ();
+ around_label = gen_label_rtx ();
+ emit_jump (around_label);
+
+ emit_label (region->label);
+ expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ /* We must adjust the stack before we reach the AROUND_LABEL because
+ the call to FAILURE does not occur on all paths to the
+ AROUND_LABEL. */
+ do_pending_stack_adjust ();
- return new_region;
+ emit_label (around_label);
}
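/* Sketch of the expected pairing (hypothetical front-end usage; ALLOWED
   and FAILURE are supplied by the language, e.g. the type list of a C++
   exception specification and a call to its unexpected handler):

       expand_eh_region_start ();
         ... expand the body covered by the specification ...
       expand_eh_region_end_allowed (allowed, failure);  */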
+/* End an exception region for a must-not-throw filter. FAILURE is an
+ expression to invoke if an uncaught exception propagates this far.
-/* Given a rethrow symbol, find the EH region number this is for. */
+ This is conceptually identical to expand_eh_region_end_allowed with
+ an empty allowed list (if you passed "std::terminate" instead of
+ "__cxa_call_unexpected"), but they are represented differently in
+ the C++ LSDA. */
-static int
-eh_region_from_symbol (sym)
- rtx sym;
+void
+expand_eh_region_end_must_not_throw (failure)
+ tree failure;
{
- int x;
- if (sym == last_rethrow_symbol)
- return 1;
- for (x = 0; x < current_func_eh_entry; x++)
- if (function_eh_regions[x].rethrow_label == sym)
- return function_eh_regions[x].range_number;
- return -1;
+ struct eh_region *region;
+ rtx around_label;
+
+ if (! doing_eh (0))
+ return;
+
+ region = expand_eh_region_end ();
+ region->type = ERT_MUST_NOT_THROW;
+ region->label = gen_label_rtx ();
+
+ /* We must emit the call to FAILURE here, so that if this function
+ throws a different exception, it will be processed by the
+ correct region. */
+
+ around_label = gen_label_rtx ();
+ emit_jump (around_label);
+
+ emit_label (region->label);
+ expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+ emit_label (around_label);
}
-/* Like find_func_region, but using the rethrow symbol for the region
- rather than the region number itself. */
+/* End an exception region for a throw. No handling goes on here,
+ but it's the easiest way for the front-end to indicate what type
+ is being thrown. */
-static int
-find_func_region_from_symbol (sym)
- rtx sym;
+void
+expand_eh_region_end_throw (type)
+ tree type;
{
- return find_func_region (eh_region_from_symbol (sym));
+ struct eh_region *region;
+
+ if (! doing_eh (0))
+ return;
+
+ region = expand_eh_region_end ();
+ region->type = ERT_THROW;
+ region->u.throw.type = type;
}
-/* When inlining/unrolling, we have to map the symbols passed to
- __rethrow as well. This performs the remap. If a symbol isn't found,
- the original one is returned. This is not an efficient routine,
- so don't call it on everything!! */
+/* End a fixup region. Within this region the cleanups for the immediately
+ enclosing region are _not_ run. This is used for goto cleanup to avoid
+ destroying an object twice.
+
+ This would be an extraordinarily simple prospect, were it not for the
+ fact that we don't actually know what the immediately enclosing region
+ is. This surprising fact is because expand_cleanups is currently
+ generating a sequence that it will insert somewhere else. We collect
+ the proper notion of "enclosing" in convert_from_eh_region_ranges. */
-rtx
-rethrow_symbol_map (sym, map)
- rtx sym;
- rtx (*map) PARAMS ((rtx));
+void
+expand_eh_region_end_fixup (handler)
+ tree handler;
{
- int x, y;
+ struct eh_region *fixup;
- if (! flag_new_exceptions)
- return sym;
+ if (! doing_eh (0))
+ return;
- for (x = 0; x < current_func_eh_entry; x++)
- if (function_eh_regions[x].rethrow_label == sym)
- {
- /* We've found the original region, now lets determine which region
- this now maps to. */
- rtx l1 = function_eh_regions[x].handlers->handler_label;
- rtx l2 = map (l1);
- y = CODE_LABEL_NUMBER (l2); /* This is the new region number */
- x = find_func_region (y); /* Get the new permanent region */
- if (x == -1) /* Hmm, Doesn't exist yet */
- {
- x = duplicate_eh_handlers (CODE_LABEL_NUMBER (l1), y, map);
- /* Since we're mapping it, it must be used. */
- function_eh_regions[x].rethrow_ref = 1;
- }
- return function_eh_regions[x].rethrow_label;
- }
- return sym;
+ fixup = expand_eh_region_end ();
+ fixup->type = ERT_FIXUP;
+ fixup->u.fixup.cleanup_exp = handler;
}
-/* Returns nonzero if the rethrow label for REGION is referenced
- somewhere (i.e. we rethrow out of REGION or some other region
- masquerading as REGION). */
+/* Return an rtl expression for a pointer to the exception object
+ within a handler. */
-int
-rethrow_used (region)
- int region;
+rtx
+get_exception_pointer (fun)
+ struct function *fun;
{
- if (flag_new_exceptions)
+ rtx exc_ptr = fun->eh->exc_ptr;
+ if (fun == cfun && ! exc_ptr)
{
- int ret = function_eh_regions[find_func_region (region)].rethrow_ref;
- return ret;
+ exc_ptr = gen_reg_rtx (Pmode);
+ fun->eh->exc_ptr = exc_ptr;
}
- return 0;
+ return exc_ptr;
}
-
-/* Routine to see if exception handling is turned on.
- DO_WARN is non-zero if we want to inform the user that exception
- handling is turned off.
+/* Return an rtl expression for the exception dispatch filter
+ within a handler. */
- This is used to ensure that -fexceptions has been specified if the
- compiler tries to use any exception-specific functions. */
-
-int
-doing_eh (do_warn)
- int do_warn;
+static rtx
+get_exception_filter (fun)
+ struct function *fun;
{
- if (! flag_exceptions)
+ rtx filter = fun->eh->filter;
+ if (fun == cfun && ! filter)
{
- static int warned = 0;
- if (! warned && do_warn)
- {
- error ("exception handling disabled, use -fexceptions to enable");
- warned = 1;
- }
- return 0;
+ filter = gen_reg_rtx (word_mode);
+ fun->eh->filter = filter;
}
- return 1;
+ return filter;
}
+
+/* Begin a region that will contain entries created with
+ add_partial_entry. */
-/* Given a return address in ADDR, determine the address we should use
- to find the corresponding EH region. */
-
-rtx
-eh_outer_context (addr)
- rtx addr;
+void
+begin_protect_partials ()
{
- /* First mask out any unwanted bits. */
-#ifdef MASK_RETURN_ADDR
- expand_and (addr, MASK_RETURN_ADDR, addr);
-#endif
-
- /* Then adjust to find the real return address. */
-#if defined (RETURN_ADDR_OFFSET)
- addr = plus_constant (addr, RETURN_ADDR_OFFSET);
-#endif
-
- return addr;
+ /* Push room for a new list. */
+ cfun->eh->protect_list
+ = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}
/* Start a new exception region for a region of code that has a
@@ -1121,1819 +1005,1904 @@ add_partial_entry (handler)
{
expand_eh_region_start ();
- /* Because this is a cleanup action, we may have to protect the handler
- with __terminate. */
- handler = protect_with_terminate (handler);
-
+ /* ??? This comment was old before the most recent rewrite. We
+ really ought to fix the callers at some point. */
/* For backwards compatibility, we allow callers to omit calls to
begin_protect_partials for the outermost region. So, we must
explicitly do so here. */
- if (!protect_list)
+ if (!cfun->eh->protect_list)
begin_protect_partials ();
/* Add this entry to the front of the list. */
- TREE_VALUE (protect_list)
- = tree_cons (NULL_TREE, handler, TREE_VALUE (protect_list));
+ TREE_VALUE (cfun->eh->protect_list)
+ = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}
-/* Emit code to get EH context to current function. */
+/* End all the pending exception regions on protect_list. */
-static rtx
-call_get_eh_context ()
+void
+end_protect_partials ()
{
- static tree fn;
- tree expr;
-
- if (fn == NULL_TREE)
- {
- tree fntype;
- fn = get_identifier ("__get_eh_context");
- fntype = build_pointer_type (build_pointer_type
- (build_pointer_type (void_type_node)));
- fntype = build_function_type (fntype, NULL_TREE);
- fn = build_decl (FUNCTION_DECL, fn, fntype);
- DECL_EXTERNAL (fn) = 1;
- TREE_PUBLIC (fn) = 1;
- DECL_ARTIFICIAL (fn) = 1;
- TREE_READONLY (fn) = 1;
- make_decl_rtl (fn, NULL_PTR);
- assemble_external (fn);
+ tree t;
- ggc_add_tree_root (&fn, 1);
- }
+ /* ??? This comment was old before the most recent rewrite. We
+ really ought to fix the callers at some point. */
+ /* For backwards compatibility, we allow callers to omit the call to
+ begin_protect_partials for the outermost region. So,
+ PROTECT_LIST may be NULL. */
+ if (!cfun->eh->protect_list)
+ return;
- expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- expr, NULL_TREE, NULL_TREE);
- TREE_SIDE_EFFECTS (expr) = 1;
+ /* Pop the topmost entry. */
+ t = TREE_VALUE (cfun->eh->protect_list);
+ cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
- return copy_to_reg (expand_expr (expr, NULL_RTX, VOIDmode, 0));
+ /* End all the exception regions. */
+ for (; t; t = TREE_CHAIN (t))
+ expand_eh_region_end_cleanup (TREE_VALUE (t));
}
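/* Illustrative sketch, not part of this patch: protect_list above behaves
   as a stack of lists -- begin_protect_partials pushes an empty list,
   add_partial_entry conses a handler onto the top list, and
   end_protect_partials pops the top list and closes a region for each
   saved handler.  The `node' type below is hypothetical and stands in
   for GCC's TREE_LIST; freeing is omitted for brevity.  */
#include <stdlib.h>

struct node { void *value; struct node *chain; };

static struct node *protect_stack;  /* stands in for cfun->eh->protect_list */

static struct node *
cons (void *value, struct node *chain)
{
  struct node *n = malloc (sizeof *n);
  n->value = value;
  n->chain = chain;
  return n;
}

static void
begin_partials (void)               /* cf. begin_protect_partials */
{
  protect_stack = cons (NULL, protect_stack);
}

static void
add_partial (void *handler)         /* cf. add_partial_entry */
{
  protect_stack->value = cons (handler, protect_stack->value);
}

static void
end_partials (void (*end_region) (void *))   /* cf. end_protect_partials */
{
  struct node *t = protect_stack->value;
  protect_stack = protect_stack->chain;
  for (; t; t = t->chain)
    end_region (t->value);
}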
-/* Get a reference to the EH context.
- We will only generate a register for the current function EH context here,
- and emit a USE insn to mark that this is a EH context register.
+
+/* This section is for the exception handling specific optimization pass. */
- Later, emit_eh_context will emit needed call to __get_eh_context
- in libgcc2, and copy the value to the register we have generated. */
+/* Random access the exception region tree. It's just as simple to
+ collect the regions this way as in expand_eh_region_start, but
+ without having to realloc memory. */
-rtx
-get_eh_context ()
+static void
+collect_eh_region_array ()
{
- if (current_function_ehc == 0)
- {
- rtx insn;
+ struct eh_region **array, *i;
- current_function_ehc = gen_reg_rtx (Pmode);
-
- insn = gen_rtx_USE (GET_MODE (current_function_ehc),
- current_function_ehc);
- insn = emit_insn_before (insn, get_first_nonparm_insn ());
-
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_EH_CONTEXT, current_function_ehc,
- REG_NOTES (insn));
- }
- return current_function_ehc;
-}
-
-/* Get a reference to the dynamic handler chain. It points to the
- pointer to the next element in the dynamic handler chain. It ends
- when there are no more elements in the dynamic handler chain, when
- the value is &top_elt from libgcc2.c. Immediately after the
- pointer, is an area suitable for setjmp/longjmp when
- DONT_USE_BUILTIN_SETJMP is defined, and an area suitable for
- __builtin_setjmp/__builtin_longjmp when DONT_USE_BUILTIN_SETJMP
- isn't defined. */
+ i = cfun->eh->region_tree;
+ if (! i)
+ return;
-rtx
-get_dynamic_handler_chain ()
+ array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
+ cfun->eh->region_array = array;
+
+ while (1)
+ {
+ array[i->region_number] = i;
+
+ /* If there are sub-regions, process them. */
+ if (i->inner)
+ i = i->inner;
+ /* If there are peers, process them. */
+ else if (i->next_peer)
+ i = i->next_peer;
+ /* Otherwise, step back up the tree to the next peer. */
+ else
+ {
+ do {
+ i = i->outer;
+ if (i == NULL)
+ return;
+ } while (i->next_peer == NULL);
+ i = i->next_peer;
+ }
+ }
+}
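/* Illustrative sketch, not part of this patch: the loop above is a
   non-recursive pre-order walk over a first-child/next-sibling tree.
   The `node' type below is hypothetical; its `inner', `next_peer' and
   `outer' links mirror the eh_region fields used by
   collect_eh_region_array.  */
#include <stddef.h>

struct node { int id; struct node *inner, *next_peer, *outer; };

static void
visit_all (struct node *root, void (*visit) (struct node *))
{
  struct node *i = root;

  if (! i)
    return;
  while (1)
    {
      visit (i);
      if (i->inner)                 /* descend to the first child */
        i = i->inner;
      else if (i->next_peer)        /* otherwise go to the next sibling */
        i = i->next_peer;
      else
        {
          /* Otherwise climb back up until some ancestor has a sibling.  */
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}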
+
+static void
+resolve_fixup_regions ()
{
- rtx ehc, dhc, result;
+ int i, j, n = cfun->eh->last_region_number;
- ehc = get_eh_context ();
+ for (i = 1; i <= n; ++i)
+ {
+ struct eh_region *fixup = cfun->eh->region_array[i];
+ struct eh_region *cleanup;
- /* This is the offset of dynamic_handler_chain in the eh_context struct
- declared in eh-common.h. If its location is change, change this offset */
- dhc = plus_constant (ehc, POINTER_SIZE / BITS_PER_UNIT);
+ if (! fixup || fixup->type != ERT_FIXUP)
+ continue;
- result = copy_to_reg (dhc);
+ for (j = 1; j <= n; ++j)
+ {
+ cleanup = cfun->eh->region_array[j];
+ if (cleanup->type == ERT_CLEANUP
+ && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
+ break;
+ }
+ if (j > n)
+ abort ();
- /* We don't want a copy of the dcc, but rather, the single dcc. */
- return gen_rtx_MEM (Pmode, result);
+ fixup->u.fixup.real_region = cleanup->outer;
+ }
}
-/* Get a reference to the dynamic cleanup chain. It points to the
- pointer to the next element in the dynamic cleanup chain.
- Immediately after the pointer, are two Pmode variables, one for a
- pointer to a function that performs the cleanup action, and the
- second, the argument to pass to that function. */
+/* Now that we've discovered what region actually encloses a fixup,
+ we can shuffle pointers and remove them from the tree. */
-rtx
-get_dynamic_cleanup_chain ()
+static void
+remove_fixup_regions ()
{
- rtx dhc, dcc, result;
+ int i;
+ rtx insn, note;
+ struct eh_region *fixup;
+
+ /* Walk the insn chain and adjust the REG_EH_REGION numbers
+ for instructions referencing fixup regions. This is only
+ strictly necessary for fixup regions with no parent, but
+ doesn't hurt to do it for all regions. */
+ for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
+ if (INSN_P (insn)
+ && (note = find_reg_note (insn, REG_EH_REGION, NULL))
+ && INTVAL (XEXP (note, 0)) > 0
+ && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
+ && fixup->type == ERT_FIXUP)
+ {
+ if (fixup->u.fixup.real_region)
+ XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
+ else
+ remove_note (insn, note);
+ }
+
+ /* Remove the fixup regions from the tree. */
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ fixup = cfun->eh->region_array[i];
+ if (! fixup)
+ continue;
+
+ /* Allow GC to maybe free some memory. */
+ if (fixup->type == ERT_CLEANUP)
+ fixup->u.cleanup.exp = NULL_TREE;
- dhc = get_dynamic_handler_chain ();
- dcc = plus_constant (dhc, POINTER_SIZE / BITS_PER_UNIT);
+ if (fixup->type != ERT_FIXUP)
+ continue;
- result = copy_to_reg (dcc);
+ if (fixup->inner)
+ {
+ struct eh_region *parent, *p, **pp;
+
+ parent = fixup->u.fixup.real_region;
+
+ /* Fix up the children's parent pointers; find the end of
+ the list. */
+ for (p = fixup->inner; ; p = p->next_peer)
+ {
+ p->outer = parent;
+ if (! p->next_peer)
+ break;
+ }
- /* We don't want a copy of the dcc, but rather, the single dcc. */
- return gen_rtx_MEM (Pmode, result);
+ /* In the tree of cleanups, only outer-inner ordering matters.
+ So link the children back in anywhere at the correct level. */
+ if (parent)
+ pp = &parent->inner;
+ else
+ pp = &cfun->eh->region_tree;
+ p->next_peer = *pp;
+ *pp = fixup->inner;
+ fixup->inner = NULL;
+ }
+
+ remove_eh_handler (fixup);
+ }
}
-#ifdef DONT_USE_BUILTIN_SETJMP
-/* Generate code to evaluate X and jump to LABEL if the value is nonzero.
- LABEL is an rtx of code CODE_LABEL, in this function. */
+/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
+ can_throw instruction in the region. */
static void
-jumpif_rtx (x, label)
- rtx x;
- rtx label;
+convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
+ rtx *pinsns;
+ int *orig_sp;
+ int cur;
{
- jumpif (make_tree (type_for_mode (GET_MODE (x), 0), x), label);
-}
-#endif
+ int *sp = orig_sp;
+ rtx insn, next;
-/* Start a dynamic cleanup on the EH runtime dynamic cleanup stack.
- We just need to create an element for the cleanup list, and push it
- into the chain.
+ for (insn = *pinsns; insn ; insn = next)
+ {
+ next = NEXT_INSN (insn);
+ if (GET_CODE (insn) == NOTE)
+ {
+ int kind = NOTE_LINE_NUMBER (insn);
+ if (kind == NOTE_INSN_EH_REGION_BEG
+ || kind == NOTE_INSN_EH_REGION_END)
+ {
+ if (kind == NOTE_INSN_EH_REGION_BEG)
+ {
+ struct eh_region *r;
- A dynamic cleanup is a cleanup action implied by the presence of an
- element on the EH runtime dynamic cleanup stack that is to be
- performed when an exception is thrown. The cleanup action is
- performed by __sjthrow when an exception is thrown. Only certain
- actions can be optimized into dynamic cleanup actions. For the
- restrictions on what actions can be performed using this routine,
- see expand_eh_region_start_tree. */
+ *sp++ = cur;
+ cur = NOTE_EH_HANDLER (insn);
-static void
-start_dynamic_cleanup (func, arg)
- tree func;
- tree arg;
+ r = cfun->eh->region_array[cur];
+ if (r->type == ERT_FIXUP)
+ {
+ r = r->u.fixup.real_region;
+ cur = r ? r->region_number : 0;
+ }
+ else if (r->type == ERT_CATCH)
+ {
+ r = r->outer;
+ cur = r ? r->region_number : 0;
+ }
+ }
+ else
+ cur = *--sp;
+
+ /* Removing the first insn of a CALL_PLACEHOLDER sequence
+ requires extra care to adjust sequence start. */
+ if (insn == *pinsns)
+ *pinsns = next;
+ remove_insn (insn);
+ continue;
+ }
+ }
+ else if (INSN_P (insn))
+ {
+ if (cur > 0
+ && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
+ /* Calls can always potentially throw exceptions, unless
+		 they have a REG_EH_REGION note with a value of 0 or less,
+		 which should be the only possible kind so far.  */
+ && (GET_CODE (insn) == CALL_INSN
+ /* If we wanted exceptions for non-call insns, then
+ any may_trap_p instruction could throw. */
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn)))))
+ {
+ REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
+ REG_NOTES (insn));
+ }
+
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
+ sp, cur);
+ convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
+ sp, cur);
+ convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
+ sp, cur);
+ }
+ }
+ }
+
+ if (sp != orig_sp)
+ abort ();
+}
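/* Illustrative sketch, not part of this patch: the conversion above keeps
   a "current region" while scanning and uses an explicit stack so that a
   REGION_END note restores whatever was current at the matching
   REGION_BEG.  The miniature version below applies the same discipline
   to an array of hypothetical events.  */
enum ev_kind { EV_BEG, EV_END, EV_INSN };
struct event { enum ev_kind kind; int region; };

static void
annotate (struct event *ev, int n, int *stack)
{
  int *sp = stack;
  int cur = 0;
  int i;

  for (i = 0; i < n; ++i)
    switch (ev[i].kind)
      {
      case EV_BEG:
        *sp++ = cur;                /* remember the enclosing region */
        cur = ev[i].region;
        break;
      case EV_END:
        cur = *--sp;                /* restore it when the region closes */
        break;
      case EV_INSN:
        ev[i].region = cur;         /* tag the "insn" with its region */
        break;
      }
}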
+
+void
+convert_from_eh_region_ranges ()
{
- rtx dcc;
- rtx new_func, new_arg;
- rtx x, buf;
- int size;
+ int *stack;
+ rtx insns;
- /* We allocate enough room for a pointer to the function, and
- one argument. */
- size = 2;
+ collect_eh_region_array ();
+ resolve_fixup_regions ();
- /* XXX, FIXME: The stack space allocated this way is too long lived,
- but there is no allocation routine that allocates at the level of
- the last binding contour. */
- buf = assign_stack_local (BLKmode,
- GET_MODE_SIZE (Pmode)*(size+1),
- 0);
+ stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
+ insns = get_insns ();
+ convert_from_eh_region_ranges_1 (&insns, stack, 0);
+ free (stack);
- buf = change_address (buf, Pmode, NULL_RTX);
+ remove_fixup_regions ();
+}
- /* Store dcc into the first word of the newly allocated buffer. */
+void
+find_exception_handler_labels ()
+{
+ rtx list = NULL_RTX;
+ int i;
- dcc = get_dynamic_cleanup_chain ();
- emit_move_insn (buf, dcc);
+ free_EXPR_LIST_list (&exception_handler_labels);
- /* Store func and arg into the cleanup list element. */
+ if (cfun->eh->region_tree == NULL)
+ return;
- new_func = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
- GET_MODE_SIZE (Pmode)));
- new_arg = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
- GET_MODE_SIZE (Pmode)*2));
- x = expand_expr (func, new_func, Pmode, 0);
- if (x != new_func)
- emit_move_insn (new_func, x);
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+ rtx lab;
- x = expand_expr (arg, new_arg, Pmode, 0);
- if (x != new_arg)
- emit_move_insn (new_arg, x);
+ if (! region)
+ continue;
+ if (cfun->eh->built_landing_pads)
+ lab = region->landing_pad;
+ else
+ lab = region->label;
- /* Update the cleanup chain. */
+ if (lab)
+ list = alloc_EXPR_LIST (0, lab, list);
+ }
- x = force_operand (XEXP (buf, 0), dcc);
- if (x != dcc)
- emit_move_insn (dcc, x);
-}
+ /* For sjlj exceptions, need the return label to remain live until
+ after landing pad generation. */
+ if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
+ list = alloc_EXPR_LIST (0, return_label, list);
-/* Emit RTL to start a dynamic handler on the EH runtime dynamic
- handler stack. This should only be used by expand_eh_region_start
- or expand_eh_region_start_tree. */
+ exception_handler_labels = list;
+}
-static void
-start_dynamic_handler ()
+
+static struct eh_region *
+duplicate_eh_region_1 (o, map)
+ struct eh_region *o;
+ struct inline_remap *map;
{
- rtx dhc, dcc;
- rtx arg, buf;
- int size;
-
-#ifndef DONT_USE_BUILTIN_SETJMP
- /* The number of Pmode words for the setjmp buffer, when using the
- builtin setjmp/longjmp, see expand_builtin, case BUILT_IN_LONGJMP. */
- /* We use 2 words here before calling expand_builtin_setjmp.
- expand_builtin_setjmp uses 2 words, and then calls emit_stack_save.
- emit_stack_save needs space of size STACK_SAVEAREA_MODE (SAVE_NONLOCAL).
- Subtract one, because the assign_stack_local call below adds 1. */
- size = (2 + 2 + (GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
- / GET_MODE_SIZE (Pmode))
- - 1);
-#else
-#ifdef JMP_BUF_SIZE
- size = JMP_BUF_SIZE;
-#else
- /* Should be large enough for most systems, if it is not,
- JMP_BUF_SIZE should be defined with the proper value. It will
- also tend to be larger than necessary for most systems, a more
- optimal port will define JMP_BUF_SIZE. */
- size = FIRST_PSEUDO_REGISTER+2;
-#endif
-#endif
- /* XXX, FIXME: The stack space allocated this way is too long lived,
- but there is no allocation routine that allocates at the level of
- the last binding contour. */
- arg = assign_stack_local (BLKmode,
- GET_MODE_SIZE (Pmode)*(size+1),
- 0);
+ struct eh_region *n
+ = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
- arg = change_address (arg, Pmode, NULL_RTX);
+ n->region_number = o->region_number + cfun->eh->last_region_number;
+ n->type = o->type;
- /* Store dhc into the first word of the newly allocated buffer. */
+ switch (n->type)
+ {
+ case ERT_CLEANUP:
+ case ERT_MUST_NOT_THROW:
+ break;
- dhc = get_dynamic_handler_chain ();
- dcc = gen_rtx_MEM (Pmode, plus_constant (XEXP (arg, 0),
- GET_MODE_SIZE (Pmode)));
- emit_move_insn (arg, dhc);
+ case ERT_TRY:
+ if (o->u.try.continue_label)
+ n->u.try.continue_label
+ = get_label_from_map (map,
+ CODE_LABEL_NUMBER (o->u.try.continue_label));
+ break;
- /* Zero out the start of the cleanup chain. */
- emit_move_insn (dcc, const0_rtx);
+ case ERT_CATCH:
+ n->u.catch.type = o->u.catch.type;
+ break;
- /* The jmpbuf starts two words into the area allocated. */
- buf = plus_constant (XEXP (arg, 0), GET_MODE_SIZE (Pmode)*2);
+ case ERT_ALLOWED_EXCEPTIONS:
+ n->u.allowed.type_list = o->u.allowed.type_list;
+ break;
-#ifdef DONT_USE_BUILTIN_SETJMP
- {
- rtx x;
- x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_CONST,
- TYPE_MODE (integer_type_node), 1,
- buf, Pmode);
- /* If we come back here for a catch, transfer control to the handler. */
- jumpif_rtx (x, ehstack.top->entry->exception_handler_label);
- }
-#else
- expand_builtin_setjmp_setup (buf,
- ehstack.top->entry->exception_handler_label);
-#endif
+ case ERT_THROW:
+      n->u.throw.type = o->u.throw.type;
+      break;
+
+ default:
+ abort ();
+ }
- /* We are committed to this, so update the handler chain. */
+ if (o->label)
+ n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
+ if (o->resume)
+ {
+ n->resume = map->insn_map[INSN_UID (o->resume)];
+ if (n->resume == NULL)
+ abort ();
+ }
- emit_move_insn (dhc, force_operand (XEXP (arg, 0), NULL_RTX));
+ return n;
}
-/* Start an exception handling region for the given cleanup action.
- All instructions emitted after this point are considered to be part
- of the region until expand_eh_region_end is invoked. CLEANUP is
- the cleanup action to perform. The return value is true if the
- exception region was optimized away. If that case,
- expand_eh_region_end does not need to be called for this cleanup,
- nor should it be.
+static void
+duplicate_eh_region_2 (o, n_array)
+ struct eh_region *o;
+ struct eh_region **n_array;
+{
+ struct eh_region *n = n_array[o->region_number];
- This routine notices one particular common case in C++ code
- generation, and optimizes it so as to not need the exception
- region. It works by creating a dynamic cleanup action, instead of
- a using an exception region. */
+ switch (n->type)
+ {
+ case ERT_TRY:
+ n->u.try.catch = n_array[o->u.try.catch->region_number];
+ n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
+ break;
+
+ case ERT_CATCH:
+ if (o->u.catch.next_catch)
+ n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
+ if (o->u.catch.prev_catch)
+ n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
+ break;
+
+ default:
+ break;
+ }
+
+ if (o->outer)
+ n->outer = n_array[o->outer->region_number];
+ if (o->inner)
+ n->inner = n_array[o->inner->region_number];
+ if (o->next_peer)
+ n->next_peer = n_array[o->next_peer->region_number];
+}
int
-expand_eh_region_start_tree (decl, cleanup)
- tree decl;
- tree cleanup;
+duplicate_eh_regions (ifun, map)
+ struct function *ifun;
+ struct inline_remap *map;
{
- /* This is the old code. */
- if (! doing_eh (0))
- return 0;
+ int ifun_last_region_number = ifun->eh->last_region_number;
+ struct eh_region **n_array, *root, *cur;
+ int i;
- /* The optimization only applies to actions protected with
- terminate, and only applies if we are using the setjmp/longjmp
- codegen method. */
- if (exceptions_via_longjmp
- && protect_cleanup_actions_with_terminate)
- {
- tree func, arg;
- tree args;
+ if (ifun_last_region_number == 0)
+ return 0;
- /* Ignore any UNSAVE_EXPR. */
- if (TREE_CODE (cleanup) == UNSAVE_EXPR)
- cleanup = TREE_OPERAND (cleanup, 0);
-
- /* Further, it only applies if the action is a call, if there
- are 2 arguments, and if the second argument is 2. */
+ n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
- if (TREE_CODE (cleanup) == CALL_EXPR
- && (args = TREE_OPERAND (cleanup, 1))
- && (func = TREE_OPERAND (cleanup, 0))
- && (arg = TREE_VALUE (args))
- && (args = TREE_CHAIN (args))
+ for (i = 1; i <= ifun_last_region_number; ++i)
+ {
+ cur = ifun->eh->region_array[i];
+ if (!cur || cur->region_number != i)
+ continue;
+ n_array[i] = duplicate_eh_region_1 (cur, map);
+ }
+ for (i = 1; i <= ifun_last_region_number; ++i)
+ {
+ cur = ifun->eh->region_array[i];
+ if (!cur || cur->region_number != i)
+ continue;
+ duplicate_eh_region_2 (cur, n_array);
+ }
- /* is the second argument 2? */
- && TREE_CODE (TREE_VALUE (args)) == INTEGER_CST
- && compare_tree_int (TREE_VALUE (args), 2) == 0
+ root = n_array[ifun->eh->region_tree->region_number];
+ cur = cfun->eh->cur_region;
+ if (cur)
+ {
+ struct eh_region *p = cur->inner;
+ if (p)
+ {
+ while (p->next_peer)
+ p = p->next_peer;
+ p->next_peer = root;
+ }
+ else
+ cur->inner = root;
- /* Make sure there are no other arguments. */
- && TREE_CHAIN (args) == NULL_TREE)
+ for (i = 1; i <= ifun_last_region_number; ++i)
+ if (n_array[i]->outer == NULL)
+ n_array[i]->outer = cur;
+ }
+ else
+ {
+ struct eh_region *p = cfun->eh->region_tree;
+ if (p)
{
- /* Arrange for returns and gotos to pop the entry we make on the
- dynamic cleanup stack. */
- expand_dcc_cleanup (decl);
- start_dynamic_cleanup (func, arg);
- return 1;
+ while (p->next_peer)
+ p = p->next_peer;
+ p->next_peer = root;
}
+ else
+ cfun->eh->region_tree = root;
}
- expand_eh_region_start_for_decl (decl);
- ehstack.top->entry->finalization = cleanup;
+ free (n_array);
- return 0;
+ i = cfun->eh->last_region_number;
+ cfun->eh->last_region_number = i + ifun_last_region_number;
+ return i;
}
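/* Illustrative sketch, not part of this patch: duplicate_eh_regions copies
   a pointer-linked structure in two passes -- first clone every node into
   an array indexed by its number, then rewrite each pointer by looking up
   the clone of its target, the same split of work done by
   duplicate_eh_region_1 and duplicate_eh_region_2.  The `node' type is
   hypothetical.  */
#include <stdlib.h>

struct node { int num; struct node *outer; };

static struct node **
clone_all (struct node **old, int n)
{
  struct node **copy = calloc (n + 1, sizeof (*copy));
  int i;

  /* Pass 1: copy payloads, indexed by node number.  */
  for (i = 1; i <= n; ++i)
    if (old[i])
      {
        copy[i] = malloc (sizeof (struct node));
        *copy[i] = *old[i];
      }

  /* Pass 2: redirect pointers to the corresponding clones.  */
  for (i = 1; i <= n; ++i)
    if (copy[i] && old[i]->outer)
      copy[i]->outer = copy[old[i]->outer->num];

  return copy;
}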
-/* Just like expand_eh_region_start, except if a cleanup action is
- entered on the cleanup chain, the TREE_PURPOSE of the element put
- on the chain is DECL. DECL should be the associated VAR_DECL, if
- any, otherwise it should be NULL_TREE. */
+
+/* ??? Move from tree.c to tree.h. */
+#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
-void
-expand_eh_region_start_for_decl (decl)
- tree decl;
+static int
+t2r_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
{
- rtx note;
+ tree entry = (tree) pentry;
+ tree data = (tree) pdata;
- /* This is the old code. */
- if (! doing_eh (0))
- return;
+ return TREE_PURPOSE (entry) == data;
+}
- /* We need a new block to record the start and end of the
- dynamic handler chain. We also want to prevent jumping into
- a try block. */
- expand_start_bindings (2);
+static hashval_t
+t2r_hash (pentry)
+ const PTR pentry;
+{
+ tree entry = (tree) pentry;
+ return TYPE_HASH (TREE_PURPOSE (entry));
+}
- /* But we don't need or want a new temporary level. */
- pop_temp_slots ();
+static int
+t2r_mark_1 (slot, data)
+ PTR *slot;
+ PTR data ATTRIBUTE_UNUSED;
+{
+ tree contents = (tree) *slot;
+ ggc_mark_tree (contents);
+ return 1;
+}
+
+static void
+t2r_mark (addr)
+ PTR addr;
+{
+ htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
+}
- /* Mark this block as created by expand_eh_region_start. This
- is so that we can pop the block with expand_end_bindings
- automatically. */
- mark_block_as_eh_region ();
+static void
+add_type_for_runtime (type)
+ tree type;
+{
+ tree *slot;
- if (exceptions_via_longjmp)
+ slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
+ TYPE_HASH (type), INSERT);
+ if (*slot == NULL)
{
- /* Arrange for returns and gotos to pop the entry we make on the
- dynamic handler stack. */
- expand_dhc_cleanup (decl);
+ tree runtime = (*lang_eh_runtime_type) (type);
+ *slot = tree_cons (type, runtime, NULL_TREE);
}
-
- push_eh_entry (&ehstack);
- note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_BEG);
- NOTE_EH_HANDLER (note)
- = CODE_LABEL_NUMBER (ehstack.top->entry->exception_handler_label);
- if (exceptions_via_longjmp)
- start_dynamic_handler ();
}
+
+static tree
+lookup_type_for_runtime (type)
+ tree type;
+{
+ tree *slot;
-/* Start an exception handling region. All instructions emitted after
- this point are considered to be part of the region until
- expand_eh_region_end is invoked. */
+ slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
+ TYPE_HASH (type), NO_INSERT);
-void
-expand_eh_region_start ()
-{
- expand_eh_region_start_for_decl (NULL_TREE);
+  /* We should have always inserted the data earlier. */
+ return TREE_VALUE (*slot);
}
-/* End an exception handling region. The information about the region
- is found on the top of ehstack.
+
+/* Represent an entry in @TTypes for either catch actions
+ or exception filter actions. */
+struct ttypes_filter
+{
+ tree t;
+ int filter;
+};
+
+/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
+ (a tree) for a @TTypes type node we are thinking about adding. */
- HANDLER is either the cleanup for the exception region, or if we're
- marking the end of a try block, HANDLER is integer_zero_node.
+static int
+ttypes_filter_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ tree data = (tree) pdata;
- HANDLER will be transformed to rtl when expand_leftover_cleanups
- is invoked. */
+ return entry->t == data;
+}
-void
-expand_eh_region_end (handler)
- tree handler;
+static hashval_t
+ttypes_filter_hash (pentry)
+ const PTR pentry;
{
- struct eh_entry *entry;
- struct eh_node *node;
- rtx note;
- int ret, r;
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ return TYPE_HASH (entry->t);
+}
- if (! doing_eh (0))
- return;
+/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
+ exception specification list we are thinking about adding. */
+/* ??? Currently we use the type lists in the order given. Someone
+ should put these in some canonical order. */
- entry = pop_eh_entry (&ehstack);
+static int
+ehspec_filter_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
- note = emit_note (NULL_PTR, NOTE_INSN_EH_REGION_END);
- ret = NOTE_EH_HANDLER (note)
- = CODE_LABEL_NUMBER (entry->exception_handler_label);
- if (exceptions_via_longjmp == 0 && ! flag_new_exceptions
- /* We share outer_context between regions; only emit it once. */
- && INSN_UID (entry->outer_context) == 0)
- {
- rtx label;
+ return type_list_equal (entry->t, data->t);
+}
- label = gen_label_rtx ();
- emit_jump (label);
+/* Hash function for exception specification lists. */
- /* Emit a label marking the end of this exception region that
- is used for rethrowing into the outer context. */
- emit_label (entry->outer_context);
- expand_internal_throw ();
+static hashval_t
+ehspec_filter_hash (pentry)
+ const PTR pentry;
+{
+ const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
+ hashval_t h = 0;
+ tree list;
- emit_label (label);
- }
+ for (list = entry->t; list ; list = TREE_CHAIN (list))
+ h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
+ return h;
+}
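/* Illustrative sketch, not part of this patch: the loop above combines the
   per-type hashes with a rotate-and-add step.  For a 32-bit value,
   (h << 5) + (h >> 27) equals a left rotation by 5 bits (the two halves
   cannot carry into each other), so every earlier element keeps
   influencing the final hash.  A standalone version over an array: */
#include <stddef.h>
#include <stdint.h>

static uint32_t
hash_list (const uint32_t *vals, size_t n)
{
  uint32_t h = 0;
  size_t i;

  for (i = 0; i < n; ++i)
    h = (h << 5) + (h >> 27) + vals[i];   /* rotl (h, 5) + vals[i] */
  return h;
}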
- entry->finalization = handler;
+/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
+ up the search. Return the filter value to be used. */
- /* create region entry in final exception table */
- r = new_eh_region_entry (NOTE_EH_HANDLER (note), entry->rethrow_label);
+static int
+add_ttypes_entry (ttypes_hash, type)
+ htab_t ttypes_hash;
+ tree type;
+{
+ struct ttypes_filter **slot, *n;
- enqueue_eh_entry (ehqueue, entry);
+ slot = (struct ttypes_filter **)
+ htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
- /* If we have already started ending the bindings, don't recurse. */
- if (is_eh_region ())
+ if ((n = *slot) == NULL)
{
- /* Because we don't need or want a new temporary level and
- because we didn't create one in expand_eh_region_start,
- create a fake one now to avoid removing one in
- expand_end_bindings. */
- push_temp_slots ();
+ /* Filter value is a 1 based table index. */
- mark_block_as_not_eh_region ();
+ n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n->t = type;
+ n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
+ *slot = n;
- expand_end_bindings (NULL_TREE, 0, 0);
+ VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
}
- /* Go through the goto handlers in the queue, emitting their
- handlers if we now have enough information to do so. */
- for (node = ehqueue->head; node; node = node->chain)
- if (node->entry->goto_entry_p
- && node->entry->outer_context == entry->rethrow_label)
- emit_cleanup_handler (node->entry);
-
- /* We can't emit handlers for goto entries until their scopes are
- complete because we don't know where they need to rethrow to,
- yet. */
- if (entry->finalization != integer_zero_node
- && (!entry->goto_entry_p
- || find_func_region_from_symbol (entry->outer_context) != -1))
- emit_cleanup_handler (entry);
+ return n->filter;
}
-/* End the EH region for a goto fixup. We only need them in the region-based
- EH scheme. */
+/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
+ to speed up the search. Return the filter value to be used. */
-void
-expand_fixup_region_start ()
+static int
+add_ehspec_entry (ehspec_hash, ttypes_hash, list)
+ htab_t ehspec_hash;
+ htab_t ttypes_hash;
+ tree list;
{
- if (! doing_eh (0) || exceptions_via_longjmp)
- return;
+ struct ttypes_filter **slot, *n;
+ struct ttypes_filter dummy;
- expand_eh_region_start ();
- /* Mark this entry as the entry for a goto. */
- ehstack.top->entry->goto_entry_p = 1;
+ dummy.t = list;
+ slot = (struct ttypes_filter **)
+ htab_find_slot (ehspec_hash, &dummy, INSERT);
+
+ if ((n = *slot) == NULL)
+ {
+ /* Filter value is a -1 based byte index into a uleb128 buffer. */
+
+ n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n->t = list;
+ n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
+ *slot = n;
+
+ /* Look up each type in the list and encode its filter
+ value as a uleb128. Terminate the list with 0. */
+ for (; list ; list = TREE_CHAIN (list))
+ push_uleb128 (&cfun->eh->ehspec_data,
+ add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
+ VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
+ }
+
+ return n->filter;
}
-/* End the EH region for a goto fixup. CLEANUP is the cleanup we just
- expanded; to avoid running it twice if it throws, we look through the
- ehqueue for a matching region and rethrow from its outer_context. */
+/* Generate the action filter values to be used for CATCH and
+ ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
+ we use lots of landing pads, and so every type or list can share
+ the same filter value, which saves table space. */
-void
-expand_fixup_region_end (cleanup)
- tree cleanup;
+static void
+assign_filter_values ()
{
- struct eh_node *node;
- int dont_issue;
-
- if (! doing_eh (0) || exceptions_via_longjmp)
- return;
+ int i;
+ htab_t ttypes, ehspec;
- for (node = ehstack.top; node && node->entry->finalization != cleanup; )
- node = node->chain;
- if (node == 0)
- for (node = ehqueue->head; node && node->entry->finalization != cleanup; )
- node = node->chain;
- if (node == 0)
- abort ();
+ VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
+ VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
- /* If the outer context label has not been issued yet, we don't want
- to issue it as a part of this region, unless this is the
- correct region for the outer context. If we did, then the label for
- the outer context will be WITHIN the begin/end labels,
- and we could get an infinte loop when it tried to rethrow, or just
- generally incorrect execution following a throw. */
+ ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
+ ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
- if (flag_new_exceptions)
- dont_issue = 0;
- else
- dont_issue = ((INSN_UID (node->entry->outer_context) == 0)
- && (ehstack.top->entry != node->entry));
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *r = cfun->eh->region_array[i];
- ehstack.top->entry->outer_context = node->entry->outer_context;
+ /* Mind we don't process a region more than once. */
+ if (!r || r->region_number != i)
+ continue;
- /* Since we are rethrowing to the OUTER region, we know we don't need
- a jump around sequence for this region, so we'll pretend the outer
- context label has been issued by setting INSN_UID to 1, then clearing
- it again afterwards. */
+ switch (r->type)
+ {
+ case ERT_CATCH:
+ r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
+ break;
- if (dont_issue)
- INSN_UID (node->entry->outer_context) = 1;
+ case ERT_ALLOWED_EXCEPTIONS:
+ r->u.allowed.filter
+ = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
+ break;
- /* Just rethrow. size_zero_node is just a NOP. */
- expand_eh_region_end (size_zero_node);
+ default:
+ break;
+ }
+ }
- if (dont_issue)
- INSN_UID (node->entry->outer_context) = 0;
+ htab_delete (ttypes);
+ htab_delete (ehspec);
}
-/* If we are using the setjmp/longjmp EH codegen method, we emit a
- call to __sjthrow. Otherwise, we emit a call to __throw. */
-
-void
-emit_throw ()
+static void
+build_post_landing_pads ()
{
- if (exceptions_via_longjmp)
- {
- emit_library_call (sjthrow_libfunc, 0, VOIDmode, 0);
- }
- else
+ int i;
+
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
-#ifdef JUMP_TO_THROW
- emit_indirect_jump (throw_libfunc);
-#else
- emit_library_call (throw_libfunc, 0, VOIDmode, 0);
-#endif
- }
- emit_barrier ();
-}
+ struct eh_region *region = cfun->eh->region_array[i];
+ rtx seq;
-/* Throw the current exception. If appropriate, this is done by jumping
- to the next handler. */
+ /* Mind we don't process a region more than once. */
+ if (!region || region->region_number != i)
+ continue;
-void
-expand_internal_throw ()
-{
- emit_throw ();
-}
+ switch (region->type)
+ {
+ case ERT_TRY:
+ /* ??? Collect the set of all non-overlapping catch handlers
+ all the way up the chain until blocked by a cleanup. */
+ /* ??? Outer try regions can share landing pads with inner
+ try regions if the types are completely non-overlapping,
+	       and there are no intervening cleanups.  */
-/* Called from expand_exception_blocks and expand_end_catch_block to
- emit any pending handlers/cleanups queued from expand_eh_region_end. */
+ region->post_landing_pad = gen_label_rtx ();
-void
-expand_leftover_cleanups ()
-{
- struct eh_entry *entry;
+ start_sequence ();
- for (entry = dequeue_eh_entry (ehqueue);
- entry;
- entry = dequeue_eh_entry (ehqueue))
- {
- /* A leftover try block. Shouldn't be one here. */
- if (entry->finalization == integer_zero_node)
- abort ();
+ emit_label (region->post_landing_pad);
- free (entry);
- }
-}
+ /* ??? It is mighty inconvenient to call back into the
+ switch statement generation code in expand_end_case.
+ Rapid prototyping sez a sequence of ifs. */
+ {
+ struct eh_region *c;
+ for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
+ {
+ /* ??? _Unwind_ForcedUnwind wants no match here. */
+ if (c->u.catch.type == NULL)
+ emit_jump (c->label);
+ else
+ emit_cmp_and_jump_insns (cfun->eh->filter,
+ GEN_INT (c->u.catch.filter),
+ EQ, NULL_RTX, word_mode,
+ 0, 0, c->label);
+ }
+ }
-/* Called at the start of a block of try statements. */
-void
-expand_start_try_stmts ()
-{
- if (! doing_eh (1))
- return;
+ /* We delay the generation of the _Unwind_Resume until we generate
+ landing pads. We emit a marker here so as to get good control
+ flow data in the meantime. */
+ region->resume
+ = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
+ emit_barrier ();
- expand_eh_region_start ();
-}
+ seq = get_insns ();
+ end_sequence ();
-/* Called to begin a catch clause. The parameter is the object which
- will be passed to the runtime type check routine. */
-void
-start_catch_handler (rtime)
- tree rtime;
-{
- rtx handler_label;
- int insn_region_num;
- int eh_region_entry;
+ emit_insns_before (seq, region->u.try.catch->label);
+ break;
- if (! doing_eh (1))
- return;
+ case ERT_ALLOWED_EXCEPTIONS:
+ region->post_landing_pad = gen_label_rtx ();
- handler_label = catchstack.top->entry->exception_handler_label;
- insn_region_num = CODE_LABEL_NUMBER (handler_label);
- eh_region_entry = find_func_region (insn_region_num);
+ start_sequence ();
- /* If we've already issued this label, pick a new one */
- if (catchstack.top->entry->label_used)
- handler_label = gen_exception_label ();
- else
- catchstack.top->entry->label_used = 1;
+ emit_label (region->post_landing_pad);
- receive_exception_label (handler_label);
+ emit_cmp_and_jump_insns (cfun->eh->filter,
+ GEN_INT (region->u.allowed.filter),
+ EQ, NULL_RTX, word_mode, 0, 0,
+ region->label);
- add_new_handler (eh_region_entry, get_new_handler (handler_label, rtime));
+ /* We delay the generation of the _Unwind_Resume until we generate
+ landing pads. We emit a marker here so as to get good control
+ flow data in the meantime. */
+ region->resume
+ = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
+ emit_barrier ();
- if (flag_new_exceptions && ! exceptions_via_longjmp)
- return;
+ seq = get_insns ();
+ end_sequence ();
- /* Under the old mechanism, as well as setjmp/longjmp, we need to
- issue code to compare 'rtime' to the value in eh_info, via the
- matching function in eh_info. If its is false, we branch around
- the handler we are about to issue. */
+ emit_insns_before (seq, region->label);
+ break;
- if (rtime != NULL_TREE && rtime != CATCH_ALL_TYPE)
- {
- rtx call_rtx, rtime_address;
+ case ERT_CLEANUP:
+ case ERT_MUST_NOT_THROW:
+ region->post_landing_pad = region->label;
+ break;
- if (catchstack.top->entry->false_label != NULL_RTX)
- {
- error ("Never issued previous false_label");
+ case ERT_CATCH:
+ case ERT_THROW:
+ /* Nothing to do. */
+ break;
+
+ default:
abort ();
}
- catchstack.top->entry->false_label = gen_exception_label ();
-
- rtime_address = expand_expr (rtime, NULL_RTX, Pmode, EXPAND_INITIALIZER);
-#ifdef POINTERS_EXTEND_UNSIGNED
- rtime_address = convert_memory_address (Pmode, rtime_address);
-#endif
- rtime_address = force_reg (Pmode, rtime_address);
-
- /* Now issue the call, and branch around handler if needed */
- call_rtx = emit_library_call_value (eh_rtime_match_libfunc, NULL_RTX,
- LCT_NORMAL,
- TYPE_MODE (integer_type_node),
- 1, rtime_address, Pmode);
-
- /* Did the function return true? */
- emit_cmp_and_jump_insns (call_rtx, const0_rtx, EQ, NULL_RTX,
- GET_MODE (call_rtx), 0, 0,
- catchstack.top->entry->false_label);
}
}
-/* Called to end a catch clause. If we aren't using the new exception
- model tabel mechanism, we need to issue the branch-around label
- for the end of the catch block. */
+/* Replace RESX patterns with jumps to the next handler if any, or calls to
+ _Unwind_Resume otherwise. */
-void
-end_catch_handler ()
+static void
+connect_post_landing_pads ()
{
- if (! doing_eh (1))
- return;
+ int i;
- if (flag_new_exceptions && ! exceptions_via_longjmp)
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- emit_barrier ();
- return;
- }
-
- /* A NULL label implies the catch clause was a catch all or cleanup */
- if (catchstack.top->entry->false_label == NULL_RTX)
- return;
-
- emit_label (catchstack.top->entry->false_label);
- catchstack.top->entry->false_label = NULL_RTX;
-}
+ struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *outer;
+ rtx seq;
+
+ /* Mind we don't process a region more than once. */
+ if (!region || region->region_number != i)
+ continue;
+
+ /* If there is no RESX, or it has been deleted by flow, there's
+ nothing to fix up. */
+ if (! region->resume || INSN_DELETED_P (region->resume))
+ continue;
+
+ /* Search for another landing pad in this function. */
+ for (outer = region->outer; outer ; outer = outer->outer)
+ if (outer->post_landing_pad)
+ break;
-/* Save away the current ehqueue. */
+ start_sequence ();
-void
-push_ehqueue ()
-{
- struct eh_queue *q;
- q = (struct eh_queue *) xcalloc (1, sizeof (struct eh_queue));
- q->next = ehqueue;
- ehqueue = q;
-}
+ if (outer)
+ emit_jump (outer->post_landing_pad);
+ else
+ emit_library_call (unwind_resume_libfunc, LCT_THROW,
+ VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
-/* Restore a previously pushed ehqueue. */
+ seq = get_insns ();
+ end_sequence ();
+ emit_insns_before (seq, region->resume);
-void
-pop_ehqueue ()
-{
- struct eh_queue *q;
- expand_leftover_cleanups ();
- q = ehqueue->next;
- free (ehqueue);
- ehqueue = q;
+ /* Leave the RESX to be deleted by flow. */
+ }
}
-/* Emit the handler specified by ENTRY. */
-
+
static void
-emit_cleanup_handler (entry)
- struct eh_entry *entry;
+dw2_build_landing_pads ()
{
- rtx prev;
- rtx handler_insns;
-
- /* Since the cleanup could itself contain try-catch blocks, we
- squirrel away the current queue and replace it when we are done
- with this function. */
- push_ehqueue ();
+ int i, j;
- /* Put these handler instructions in a sequence. */
- do_pending_stack_adjust ();
- start_sequence ();
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+ rtx seq;
- /* Emit the label for the cleanup handler for this region, and
- expand the code for the handler.
-
- Note that a catch region is handled as a side-effect here; for a
- try block, entry->finalization will contain integer_zero_node, so
- no code will be generated in the expand_expr call below. But, the
- label for the handler will still be emitted, so any code emitted
- after this point will end up being the handler. */
-
- receive_exception_label (entry->exception_handler_label);
+ /* Mind we don't process a region more than once. */
+ if (!region || region->region_number != i)
+ continue;
- /* register a handler for this cleanup region */
- add_new_handler (find_func_region (CODE_LABEL_NUMBER (entry->exception_handler_label)),
- get_new_handler (entry->exception_handler_label, NULL));
+ if (region->type != ERT_CLEANUP
+ && region->type != ERT_TRY
+ && region->type != ERT_ALLOWED_EXCEPTIONS)
+ continue;
- /* And now generate the insns for the cleanup handler. */
- expand_expr (entry->finalization, const0_rtx, VOIDmode, 0);
+ start_sequence ();
- prev = get_last_insn ();
- if (prev == NULL || GET_CODE (prev) != BARRIER)
- /* Code to throw out to outer context when we fall off end of the
- handler. We can't do this here for catch blocks, so it's done
- in expand_end_all_catch instead. */
- expand_rethrow (entry->outer_context);
+ region->landing_pad = gen_label_rtx ();
+ emit_label (region->landing_pad);
- /* Finish this sequence. */
- do_pending_stack_adjust ();
- handler_insns = get_insns ();
- end_sequence ();
+#ifdef HAVE_exception_receiver
+ if (HAVE_exception_receiver)
+ emit_insn (gen_exception_receiver ());
+ else
+#endif
+#ifdef HAVE_nonlocal_goto_receiver
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+ else
+#endif
+ { /* Nothing */ }
- /* And add it to the CATCH_CLAUSES. */
- push_to_full_sequence (catch_clauses, catch_clauses_last);
- emit_insns (handler_insns);
- end_full_sequence (&catch_clauses, &catch_clauses_last);
+ /* If the eh_return data registers are call-saved, then we
+ won't have considered them clobbered from the call that
+ threw. Kill them now. */
+ for (j = 0; ; ++j)
+ {
+ unsigned r = EH_RETURN_DATA_REGNO (j);
+ if (r == INVALID_REGNUM)
+ break;
+ if (! call_used_regs[r])
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
+ }
- /* Now we've left the handler. */
- pop_ehqueue ();
-}
+ emit_move_insn (cfun->eh->exc_ptr,
+ gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
+ emit_move_insn (cfun->eh->filter,
+ gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
-/* Generate RTL for the start of a group of catch clauses.
+ seq = get_insns ();
+ end_sequence ();
- It is responsible for starting a new instruction sequence for the
- instructions in the catch block, and expanding the handlers for the
- internally-generated exception regions nested within the try block
- corresponding to this catch block. */
+ emit_insns_before (seq, region->post_landing_pad);
+ }
+}
-void
-expand_start_all_catch ()
+
+struct sjlj_lp_info
{
- struct eh_entry *entry;
- tree label;
- rtx outer_context;
-
- if (! doing_eh (1))
- return;
+ int directly_reachable;
+ int action_index;
+ int dispatch_index;
+ int call_site_index;
+};
- outer_context = ehstack.top->entry->outer_context;
+static bool
+sjlj_find_directly_reachable_regions (lp_info)
+ struct sjlj_lp_info *lp_info;
+{
+ rtx insn;
+ bool found_one = false;
- /* End the try block. */
- expand_eh_region_end (integer_zero_node);
+ for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
+ {
+ struct eh_region *region;
+ tree type_thrown;
+ rtx note;
- emit_line_note (input_filename, lineno);
- label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
+ if (! INSN_P (insn))
+ continue;
- /* The label for the exception handling block that we will save.
- This is Lresume in the documentation. */
- expand_label (label);
-
- /* Push the label that points to where normal flow is resumed onto
- the top of the label stack. */
- push_label_entry (&caught_return_label_stack, NULL_RTX, label);
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ continue;
- /* Start a new sequence for all the catch blocks. We will add this
- to the global sequence catch_clauses when we have completed all
- the handlers in this handler-seq. */
- start_sequence ();
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
- /* Throw away entries in the queue that we won't need anymore. We
- need entries for regions that have ended but to which there might
- still be gotos pending. */
- for (entry = dequeue_eh_entry (ehqueue);
- entry->finalization != integer_zero_node;
- entry = dequeue_eh_entry (ehqueue))
- free (entry);
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
+ }
- /* At this point, all the cleanups are done, and the ehqueue now has
- the current exception region at its head. We dequeue it, and put it
- on the catch stack. */
- push_entry (&catchstack, entry);
+ /* Find the first containing region that might handle the exception.
+ That's the landing pad to which we will transfer control. */
+ for (; region; region = region->outer)
+ if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
+ break;
- /* If we are not doing setjmp/longjmp EH, because we are reordered
- out of line, we arrange to rethrow in the outer context. We need to
- do this because we are not physically within the region, if any, that
- logically contains this catch block. */
- if (! exceptions_via_longjmp)
- {
- expand_eh_region_start ();
- ehstack.top->entry->outer_context = outer_context;
+ if (region)
+ {
+ lp_info[region->region_number].directly_reachable = 1;
+ found_one = true;
+ }
}
+ return found_one;
}
-/* Finish up the catch block. At this point all the insns for the
- catch clauses have already been generated, so we only have to add
- them to the catch_clauses list. We also want to make sure that if
- we fall off the end of the catch clauses that we rethrow to the
- outer EH region. */
-
-void
-expand_end_all_catch ()
+static void
+sjlj_assign_call_site_values (dispatch_label, lp_info)
+ rtx dispatch_label;
+ struct sjlj_lp_info *lp_info;
{
- rtx new_catch_clause;
- struct eh_entry *entry;
+ htab_t ar_hash;
+ int i, index;
- if (! doing_eh (1))
- return;
+ /* First task: build the action table. */
- /* Dequeue the current catch clause region. */
- entry = pop_eh_entry (&catchstack);
- free (entry);
+ VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
+ ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
- if (! exceptions_via_longjmp)
- {
- rtx outer_context = ehstack.top->entry->outer_context;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (lp_info[i].directly_reachable)
+ {
+ struct eh_region *r = cfun->eh->region_array[i];
+ r->landing_pad = dispatch_label;
+ lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
+ if (lp_info[i].action_index != -1)
+ cfun->uses_eh_lsda = 1;
+ }
- /* Finish the rethrow region. size_zero_node is just a NOP. */
- expand_eh_region_end (size_zero_node);
- /* New exceptions handling models will never have a fall through
- of a catch clause */
- if (!flag_new_exceptions)
- expand_rethrow (outer_context);
- }
- else
- expand_rethrow (NULL_RTX);
+ htab_delete (ar_hash);
- /* Code to throw out to outer context, if we fall off end of catch
- handlers. This is rethrow (Lresume, same id, same obj) in the
- documentation. We use Lresume because we know that it will throw
- to the correct context.
+ /* Next: assign dispatch values. In dwarf2 terms, this would be the
+ landing pad label for the region. For sjlj though, there is one
+ common landing pad from which we dispatch to the post-landing pads.
- In other words, if the catch handler doesn't exit or return, we
- do a "throw" (using the address of Lresume as the point being
- thrown from) so that the outer EH region can then try to process
- the exception. */
+ A region receives a dispatch index if it is directly reachable
+ and requires in-function processing. Regions that share post-landing
+     pads may share dispatch indices.  */
+ /* ??? Post-landing pad sharing doesn't actually happen at the moment
+ (see build_post_landing_pads) so we don't bother checking for it. */
- /* Now we have the complete catch sequence. */
- new_catch_clause = get_insns ();
- end_sequence ();
-
- /* This level of catch blocks is done, so set up the successful
- catch jump label for the next layer of catch blocks. */
- pop_label_entry (&caught_return_label_stack);
- pop_label_entry (&outer_context_label_stack);
-
- /* Add the new sequence of catches to the main one for this function. */
- push_to_full_sequence (catch_clauses, catch_clauses_last);
- emit_insns (new_catch_clause);
- end_full_sequence (&catch_clauses, &catch_clauses_last);
-
- /* Here we fall through into the continuation code. */
-}
+ index = 0;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (lp_info[i].directly_reachable
+ && lp_info[i].action_index >= 0)
+ lp_info[i].dispatch_index = index++;
-/* Rethrow from the outer context LABEL. */
+  /* Finally: assign call-site values. In dwarf2 terms, this would be
+ the region number assigned by convert_to_eh_region_ranges, but
+ handles no-action and must-not-throw differently. */
-static void
-expand_rethrow (label)
- rtx label;
-{
- if (exceptions_via_longjmp)
- emit_throw ();
- else
- if (flag_new_exceptions)
+ call_site_base = 1;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (lp_info[i].directly_reachable)
{
- rtx insn;
- int region;
- if (label == NULL_RTX)
- label = last_rethrow_symbol;
- emit_library_call (rethrow_libfunc, 0, VOIDmode, 1, label, Pmode);
- region = find_func_region (eh_region_from_symbol (label));
- /* If the region is -1, it doesn't exist yet. We shouldn't be
- trying to rethrow there yet. */
- if (region == -1)
- abort ();
- function_eh_regions[region].rethrow_ref = 1;
-
- /* Search backwards for the actual call insn. */
- insn = get_last_insn ();
- while (GET_CODE (insn) != CALL_INSN)
- insn = PREV_INSN (insn);
- delete_insns_since (insn);
-
- /* Mark the label/symbol on the call. */
- REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EH_RETHROW, label,
- REG_NOTES (insn));
- emit_barrier ();
+ int action = lp_info[i].action_index;
+
+ /* Map must-not-throw to otherwise unused call-site index 0. */
+ if (action == -2)
+ index = 0;
+ /* Map no-action to otherwise unused call-site index -1. */
+ else if (action == -1)
+ index = -1;
+ /* Otherwise, look it up in the table. */
+ else
+ index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
+
+ lp_info[i].call_site_index = index;
}
- else
- emit_jump (label);
}
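/* Illustrative sketch, not part of this patch: the loop above reserves two
   call-site values that never appear in the table -- 0 for must-not-throw
   (action == -2) and -1 for no-action -- and everything else gets a real
   index from add_call_site.  Reduced to a pure function: */
static int
call_site_value (int action_index, int table_index)
{
  if (action_index == -2)
    return 0;                       /* must-not-throw */
  if (action_index == -1)
    return -1;                      /* no action needed for this site */
  return table_index;               /* index returned by add_call_site */
}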
-/* Begin a region that will contain entries created with
- add_partial_entry. */
-
-void
-begin_protect_partials ()
+static void
+sjlj_mark_call_sites (lp_info)
+ struct sjlj_lp_info *lp_info;
{
- /* Push room for a new list. */
- protect_list = tree_cons (NULL_TREE, NULL_TREE, protect_list);
-}
+ int last_call_site = -2;
+ rtx insn, mem;
-/* End all the pending exception regions on protect_list. The handlers
- will be emitted when expand_leftover_cleanups is invoked. */
+ mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
+ plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
+ sjlj_fc_call_site_ofs));
-void
-end_protect_partials ()
-{
- tree t;
-
- /* For backwards compatibility, we allow callers to omit the call to
- begin_protect_partials for the outermost region. So,
- PROTECT_LIST may be NULL. */
- if (!protect_list)
- return;
+ for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
+ {
+ struct eh_region *region;
+ int this_call_site;
+ rtx note, before, p;
- /* End all the exception regions. */
- for (t = TREE_VALUE (protect_list); t; t = TREE_CHAIN (t))
- expand_eh_region_end (TREE_VALUE (t));
+ /* Reset value tracking at extended basic block boundaries. */
+ if (GET_CODE (insn) == CODE_LABEL)
+ last_call_site = -2;
- /* Pop the topmost entry. */
- protect_list = TREE_CHAIN (protect_list);
-
-}
+ if (! INSN_P (insn))
+ continue;
-/* Arrange for __terminate to be called if there is an unhandled throw
- from within E. */
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
+ {
+ /* Calls (and trapping insns) without notes are outside any
+ exception handling region in this function. Mark them as
+ no action. */
+ if (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))))
+ this_call_site = -1;
+ else
+ continue;
+ }
+ else
+ {
+ /* Calls that are known to not throw need not be marked. */
+ if (INTVAL (XEXP (note, 0)) <= 0)
+ continue;
-tree
-protect_with_terminate (e)
- tree e;
-{
- /* We only need to do this when using setjmp/longjmp EH and the
- language requires it, as otherwise we protect all of the handlers
- at once, if we need to. */
- if (exceptions_via_longjmp && protect_cleanup_actions_with_terminate)
- {
- tree handler, result;
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ this_call_site = lp_info[region->region_number].call_site_index;
+ }
+
+ if (this_call_site == last_call_site)
+ continue;
+
+      /* Don't separate a call from its argument loads.  */
+ before = insn;
+ if (GET_CODE (insn) == CALL_INSN)
+ {
+ HARD_REG_SET parm_regs;
+ int nparm_regs;
+
+ /* Since different machines initialize their parameter registers
+ in different orders, assume nothing. Collect the set of all
+ parameter registers. */
+ CLEAR_HARD_REG_SET (parm_regs);
+ nparm_regs = 0;
+ for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
+ if (GET_CODE (XEXP (p, 0)) == USE
+ && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
+ {
+ if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
+ abort ();
+
+ /* We only care about registers which can hold function
+ arguments. */
+ if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
+ continue;
+
+ SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
+ nparm_regs++;
+ }
+
+ /* Search backward for the first set of a register in this set. */
+ while (nparm_regs)
+ {
+ before = PREV_INSN (before);
- handler = make_node (RTL_EXPR);
- TREE_TYPE (handler) = void_type_node;
- RTL_EXPR_RTL (handler) = const0_rtx;
- TREE_SIDE_EFFECTS (handler) = 1;
- start_sequence_for_rtl_expr (handler);
+ /* Given that we've done no other optimizations yet,
+ the arguments should be immediately available. */
+ if (GET_CODE (before) == CODE_LABEL)
+ abort ();
- emit_library_call (terminate_libfunc, 0, VOIDmode, 0);
- emit_barrier ();
+ p = single_set (before);
+ if (p && GET_CODE (SET_DEST (p)) == REG
+ && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
+ && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
+ {
+ CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
+ nparm_regs--;
+ }
+ }
+ }
- RTL_EXPR_SEQUENCE (handler) = get_insns ();
+ start_sequence ();
+ emit_move_insn (mem, GEN_INT (this_call_site));
+ p = get_insns ();
end_sequence ();
-
- result = build (TRY_CATCH_EXPR, TREE_TYPE (e), e, handler);
- TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
- TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
- TREE_READONLY (result) = TREE_READONLY (e);
- e = result;
+ emit_insns_before (p, before);
+ last_call_site = this_call_site;
}
-
- return e;
}
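The value-tracking scheme above (forget the last stored call-site at every CODE_LABEL, and skip the store when the slot already holds the right value) can be sketched independently of the rtl stream. The record type and names below are invented for the example and are not GCC internals.

#include <stdio.h>

/* Hypothetical flattened view of the insn stream: each step is either a
   label (an extended-basic-block boundary) or a throwing point that
   needs a particular call-site value in the function context.  */
struct step
{
  int is_label;
  int call_site;
};

static void
mark_call_sites_sketch (const struct step *steps, int n)
{
  int last_call_site = -2;	/* "unknown", as in sjlj_mark_call_sites */
  int i;

  for (i = 0; i < n; ++i)
    {
      if (steps[i].is_label)
	{
	  /* Control may arrive here from elsewhere, so the slot's
	     contents are no longer known.  */
	  last_call_site = -2;
	  continue;
	}
      if (steps[i].call_site == last_call_site)
	continue;		/* the slot already holds this value */
      printf ("store call-site %d before step %d\n", steps[i].call_site, i);
      last_call_site = steps[i].call_site;
    }
}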
-
-/* The exception table that we build that is used for looking up and
- dispatching exceptions, the current number of entries, and its
- maximum size before we have to extend it.
- The number in eh_table is the code label number of the exception
- handler for the region. This is added by add_eh_table_entry and
- used by output_exception_table_entry. */
+/* Construct the SjLj_Function_Context. */
-static int *eh_table = NULL;
-static int eh_table_size = 0;
-static int eh_table_max_size = 0;
+static void
+sjlj_emit_function_enter (dispatch_label)
+ rtx dispatch_label;
+{
+ rtx fn_begin, fc, mem, seq;
-/* Note the need for an exception table entry for region N. If we
- don't need to output an explicit exception table, avoid all of the
- extra work.
+ fc = cfun->eh->sjlj_fc;
- Called from final_scan_insn when a NOTE_INSN_EH_REGION_BEG is seen.
- (Or NOTE_INSN_EH_REGION_END sometimes)
- N is the NOTE_EH_HANDLER of the note, which comes from the code
- label number of the exception handler for the region. */
+ start_sequence ();
-void
-add_eh_table_entry (n)
- int n;
-{
-#ifndef OMIT_EH_TABLE
- if (eh_table_size >= eh_table_max_size)
+ /* We're storing this libcall's address into memory instead of
+ calling it directly. Thus, we must call assemble_external_libcall
+     here, as we cannot depend on emit_library_call to do it for us. */
+ assemble_external_libcall (eh_personality_libfunc);
+ mem = change_address (fc, Pmode,
+ plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
+ emit_move_insn (mem, eh_personality_libfunc);
+
+ mem = change_address (fc, Pmode,
+ plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
+ if (cfun->uses_eh_lsda)
{
- if (eh_table)
- {
- eh_table_max_size += eh_table_max_size>>1;
-
- if (eh_table_max_size < 0)
- abort ();
-
- eh_table = (int *) xrealloc (eh_table,
- eh_table_max_size * sizeof (int));
- }
- else
- {
- eh_table_max_size = 252;
- eh_table = (int *) xmalloc (eh_table_max_size * sizeof (int));
- }
+ char buf[20];
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
+ emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
}
- eh_table[eh_table_size++] = n;
+ else
+ emit_move_insn (mem, const0_rtx);
- if (flag_new_exceptions)
- {
- /* We will output the exception table late in the compilation. That
- references type_info objects which should have already been output
- by that time. We explicitly mark those objects as being
- referenced now so we know to emit them. */
- struct handler_info *handler = get_first_handler (n);
-
- for (; handler; handler = handler->next)
- if (handler->type_info && handler->type_info != CATCH_ALL_TYPE)
- {
- tree tinfo = (tree)handler->type_info;
-
- tinfo = TREE_OPERAND (tinfo, 0);
- TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (tinfo)) = 1;
- }
- }
+#ifdef DONT_USE_BUILTIN_SETJMP
+ {
+ rtx x, note;
+ x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
+ TYPE_MODE (integer_type_node), 1,
+ plus_constant (XEXP (fc, 0),
+ sjlj_fc_jbuf_ofs), Pmode);
+
+ note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
+ NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
+
+ emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
+ TYPE_MODE (integer_type_node), 0, 0,
+ dispatch_label);
+ }
+#else
+ expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
+ dispatch_label);
#endif
-}
-/* Return a non-zero value if we need to output an exception table.
+ emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
+ 1, XEXP (fc, 0), Pmode);
- On some platforms, we don't have to output a table explicitly.
- This routine doesn't mean we don't have one. */
+ seq = get_insns ();
+ end_sequence ();
-int
-exception_table_p ()
-{
- if (eh_table)
- return 1;
+ /* ??? Instead of doing this at the beginning of the function,
+ do this in a block that is at loop level 0 and dominates all
+ can_throw_internal instructions. */
- return 0;
+ for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
+ if (GET_CODE (fn_begin) == NOTE
+ && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ break;
+ emit_insns_after (seq, fn_begin);
}
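The generated prologue above only ever touches the SjLj function context through byte offsets (sjlj_fc_personality_ofs, sjlj_fc_lsda_ofs, sjlj_fc_call_site_ofs, sjlj_fc_jbuf_ofs). A rough, hypothetical C-level picture of what a compiled function ends up doing follows, with plain setjmp standing in for the builtin and invented runtime entry points in place of _Unwind_SjLj_Register; the field layout is assumed, not taken from the runtime.

#include <setjmp.h>

/* Hypothetical layout of the per-function SjLj context; the real record
   belongs to the unwind runtime and is addressed only via offsets.  */
struct sjlj_function_context_sketch
{
  struct sjlj_function_context_sketch *prev;	/* chain kept by the runtime */
  int call_site;				/* current call-site index */
  unsigned long data[4];			/* exc_ptr, filter, ... */
  void *personality;				/* personality routine */
  void *lsda;					/* language-specific data area */
  jmp_buf jbuf;					/* dispatch lands here */
};

extern void runtime_register (struct sjlj_function_context_sketch *);
extern void runtime_unregister (struct sjlj_function_context_sketch *);
extern void function_body (struct sjlj_function_context_sketch *);

/* Shape of a compiled function: fill in the context, setjmp once,
   register with the runtime, run the body, unregister on normal exit.  */
void
compiled_function_sketch (void *personality, void *lsda)
{
  struct sjlj_function_context_sketch fc;

  fc.personality = personality;
  fc.lsda = lsda;
  if (setjmp (fc.jbuf) != 0)
    {
      /* The unwinder jumped back: fc.call_site selects which
	 post-landing-pad to dispatch to.  */
      return;
    }
  runtime_register (&fc);

  function_body (&fc);		/* throwing points update fc.call_site */

  runtime_unregister (&fc);	/* normal return path */
}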
-/* Output the entry of the exception table corresponding to the
- exception region numbered N to file FILE.
+/* Call back from expand_function_end to know where we should put
+ the call to unwind_sjlj_unregister_libfunc if needed. */
- N is the code label number corresponding to the handler of the
- region. */
+void
+sjlj_emit_function_exit_after (after)
+ rtx after;
+{
+ cfun->eh->sjlj_exit_after = after;
+}
static void
-output_exception_table_entry (file, n)
- FILE *file;
- int n;
-{
- char buf[256];
- rtx sym;
- struct handler_info *handler = get_first_handler (n);
- int index = find_func_region (n);
- rtx rethrow;
-
- /* Form and emit the rethrow label, if needed */
- if (flag_new_exceptions
- && (handler || function_eh_regions[index].rethrow_ref))
- rethrow = function_eh_regions[index].rethrow_label;
- else
- rethrow = NULL_RTX;
+sjlj_emit_function_exit ()
+{
+ rtx seq;
- if (function_eh_regions[index].emitted)
- return;
- function_eh_regions[index].emitted = 1;
+ start_sequence ();
- for ( ; handler != NULL || rethrow != NULL_RTX; handler = handler->next)
- {
- /* rethrow label should indicate the LAST entry for a region */
- if (rethrow != NULL_RTX && (handler == NULL || handler->next == NULL))
- {
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", n);
- assemble_eh_label(buf);
- rethrow = NULL_RTX;
- }
+ emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
+ 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
- ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
- sym = gen_rtx_SYMBOL_REF (Pmode, buf);
- assemble_eh_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
+ seq = get_insns ();
+ end_sequence ();
- ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
- sym = gen_rtx_SYMBOL_REF (Pmode, buf);
- assemble_eh_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
-
- if (handler == NULL)
- assemble_eh_integer (GEN_INT (0), POINTER_SIZE / BITS_PER_UNIT, 1);
- else
- {
- ASM_GENERATE_INTERNAL_LABEL (buf, "L", handler->handler_number);
- sym = gen_rtx_SYMBOL_REF (Pmode, buf);
- assemble_eh_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
- }
-
- if (flag_new_exceptions)
- {
- if (handler == NULL || handler->type_info == NULL)
- assemble_eh_integer (const0_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
- else
- if (handler->type_info == CATCH_ALL_TYPE)
- assemble_eh_integer (GEN_INT (CATCH_ALL_TYPE),
- POINTER_SIZE / BITS_PER_UNIT, 1);
- else
- output_constant ((tree)(handler->type_info),
- POINTER_SIZE / BITS_PER_UNIT);
- }
- putc ('\n', file); /* blank line */
- /* We only output the first label under the old scheme */
- if (! flag_new_exceptions || handler == NULL)
- break;
- }
-}
-
-/* Output the exception table if we have and need one. */
-
-static short language_code = 0;
-static short version_code = 0;
-
-/* This routine will set the language code for exceptions. */
-void
-set_exception_lang_code (code)
- int code;
-{
- language_code = code;
-}
+ /* ??? Really this can be done in any block at loop level 0 that
+ post-dominates all can_throw_internal instructions. This is
+ the last possible moment. */
-/* This routine will set the language version code for exceptions. */
-void
-set_exception_version_code (code)
- int code;
-{
- version_code = code;
+ emit_insns_after (seq, cfun->eh->sjlj_exit_after);
}
-/* Free the EH table structures. */
-void
-free_exception_table ()
+static void
+sjlj_emit_dispatch_table (dispatch_label, lp_info)
+ rtx dispatch_label;
+ struct sjlj_lp_info *lp_info;
{
- if (eh_table)
- free (eh_table);
- clear_function_eh_region ();
-}
+ int i, first_reachable;
+ rtx mem, dispatch, seq, fc;
+
+ fc = cfun->eh->sjlj_fc;
+
+ start_sequence ();
+
+ emit_label (dispatch_label);
-/* Output the common content of an exception table. */
-void
-output_exception_table_data ()
-{
- int i;
- char buf[256];
- extern FILE *asm_out_file;
+#ifndef DONT_USE_BUILTIN_SETJMP
+ expand_builtin_setjmp_receiver (dispatch_label);
+#endif
+
+ /* Load up dispatch index, exc_ptr and filter values from the
+ function context. */
+ mem = change_address (fc, TYPE_MODE (integer_type_node),
+ plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
+ dispatch = copy_to_reg (mem);
+
+ mem = change_address (fc, word_mode,
+ plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
+ if (word_mode != Pmode)
+ {
+#ifdef POINTERS_EXTEND_UNSIGNED
+ mem = convert_memory_address (Pmode, mem);
+#else
+ mem = convert_to_mode (Pmode, mem, 0);
+#endif
+ }
+ emit_move_insn (cfun->eh->exc_ptr, mem);
+
+ mem = change_address (fc, word_mode,
+ plus_constant (XEXP (fc, 0),
+ sjlj_fc_data_ofs + UNITS_PER_WORD));
+ emit_move_insn (cfun->eh->filter, mem);
- if (flag_new_exceptions)
+ /* Jump to one of the directly reachable regions. */
+ /* ??? This really ought to be using a switch statement. */
+
+ first_reachable = 0;
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- assemble_eh_integer (GEN_INT (NEW_EH_RUNTIME),
- POINTER_SIZE / BITS_PER_UNIT, 1);
- assemble_eh_integer (GEN_INT (language_code), 2 , 1);
- assemble_eh_integer (GEN_INT (version_code), 2 , 1);
+ if (! lp_info[i].directly_reachable
+ || lp_info[i].action_index < 0)
+ continue;
-      /* Add enough padding to make sure table aligns on a pointer boundary. */
- i = GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT - 4;
- for ( ; i < 0; i = i + GET_MODE_ALIGNMENT (ptr_mode) / BITS_PER_UNIT)
- ;
- if (i != 0)
- assemble_eh_integer (const0_rtx, i , 1);
+ if (! first_reachable)
+ {
+ first_reachable = i;
+ continue;
+ }
- /* Generate the label for offset calculations on rethrows. */
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", 0);
- assemble_eh_label(buf);
+ emit_cmp_and_jump_insns (dispatch,
+ GEN_INT (lp_info[i].dispatch_index), EQ,
+ NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
+ cfun->eh->region_array[i]->post_landing_pad);
}
- for (i = 0; i < eh_table_size; ++i)
- output_exception_table_entry (asm_out_file, eh_table[i]);
+ seq = get_insns ();
+ end_sequence ();
+ emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
+ ->post_landing_pad));
}
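The compare-and-branch chain emitted above is what the ??? comment would prefer to be a switch. A hypothetical C rendering of the dispatch the generated code performs, once the call-site index has been reloaded from the function context, is sketched here; the labels stand in for post-landing-pad code and are invented for the example.

/* Illustrative only: dispatch on the recovered call-site index.  The
   unmatched case falls through to the first directly reachable region,
   just as the emitted compare chain does.  */
static void
dispatch_sketch (int call_site_index)
{
  switch (call_site_index)
    {
    case 2: goto post_landing_pad_2;
    case 3: goto post_landing_pad_3;
    default: goto post_landing_pad_first_reachable;
    }

 post_landing_pad_2:
  /* region 2's handler */
  return;
 post_landing_pad_3:
  /* region 3's handler */
  return;
 post_landing_pad_first_reachable:
  /* first directly reachable region's handler */
  return;
}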
-/* Output an exception table for the entire compilation unit. */
-void
-output_exception_table ()
+static void
+sjlj_build_landing_pads ()
{
- char buf[256];
- extern FILE *asm_out_file;
-
- if (! doing_eh (0) || ! eh_table)
- return;
+ struct sjlj_lp_info *lp_info;
- exception_section ();
+ lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
+ sizeof (struct sjlj_lp_info));
- /* Beginning marker for table. */
- assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
- assemble_eh_label ("__EXCEPTION_TABLE__");
+ if (sjlj_find_directly_reachable_regions (lp_info))
+ {
+ rtx dispatch_label = gen_label_rtx ();
- output_exception_table_data ();
+ cfun->eh->sjlj_fc
+ = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
+ int_size_in_bytes (sjlj_fc_type_node),
+ TYPE_ALIGN (sjlj_fc_type_node));
- /* Ending marker for table. */
- /* Generate the label for end of table. */
- ASM_GENERATE_INTERNAL_LABEL (buf, "LRTH", CODE_LABEL_NUMBER (final_rethrow));
- assemble_eh_label(buf);
- assemble_eh_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ sjlj_assign_call_site_values (dispatch_label, lp_info);
+ sjlj_mark_call_sites (lp_info);
- /* For binary compatibility, the old __throw checked the second
- position for a -1, so we should output at least 2 -1's */
- if (! flag_new_exceptions)
- assemble_eh_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ sjlj_emit_function_enter (dispatch_label);
+ sjlj_emit_dispatch_table (dispatch_label, lp_info);
+ sjlj_emit_function_exit ();
+ }
- putc ('\n', asm_out_file); /* blank line */
+ free (lp_info);
}
-/* Used by the ia64 unwind format to output data for an individual
- function. */
void
-output_function_exception_table ()
+finish_eh_generation ()
{
- extern FILE *asm_out_file;
-
- if (! doing_eh (0) || ! eh_table)
+ /* Nothing to do if no regions created. */
+ if (cfun->eh->region_tree == NULL)
return;
-#ifdef HANDLER_SECTION
- HANDLER_SECTION;
-#endif
-
- output_exception_table_data ();
+ /* The object here is to provide find_basic_blocks with detailed
+ information (via reachable_handlers) on how exception control
+ flows within the function. In this first pass, we can include
+ type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
+ regions, and hope that it will be useful in deleting unreachable
+ handlers. Subsequently, we will generate landing pads which will
+ connect many of the handlers, and then type information will not
+ be effective. Still, this is a win over previous implementations. */
+
+ jump_optimize_minimal (get_insns ());
+ find_basic_blocks (get_insns (), max_reg_num (), 0);
+ cleanup_cfg ();
+
+ /* These registers are used by the landing pads. Make sure they
+ have been generated. */
+ get_exception_pointer (cfun);
+ get_exception_filter (cfun);
+
+ /* Construct the landing pads. */
+
+ assign_filter_values ();
+ build_post_landing_pads ();
+ connect_post_landing_pads ();
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_build_landing_pads ();
+ else
+ dw2_build_landing_pads ();
- /* Ending marker for table. */
- assemble_eh_integer (constm1_rtx, POINTER_SIZE / BITS_PER_UNIT, 1);
+ cfun->eh->built_landing_pads = 1;
- putc ('\n', asm_out_file); /* blank line */
+ /* We've totally changed the CFG. Start over. */
+ find_exception_handler_labels ();
+ jump_optimize_minimal (get_insns ());
+ find_basic_blocks (get_insns (), max_reg_num (), 0);
+ cleanup_cfg ();
}
-
-/* Emit code to get EH context.
-
- We have to scan thru the code to find possible EH context registers.
- Inlined functions may use it too, and thus we'll have to be able
- to change them too.
+/* This section handles removing dead code for flow. */
- This is done only if using exceptions_via_longjmp. */
+/* Remove LABEL from the exception_handler_labels list. */
-void
-emit_eh_context ()
+static void
+remove_exception_handler_label (label)
+ rtx label;
{
- rtx insn;
- rtx ehc = 0;
-
- if (! doing_eh (0))
- return;
-
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == USE)
- {
- rtx reg = find_reg_note (insn, REG_EH_CONTEXT, 0);
- if (reg)
- {
- rtx insns;
-
- start_sequence ();
-
- /* If this is the first use insn, emit the call here. This
- will always be at the top of our function, because if
- expand_inline_function notices a REG_EH_CONTEXT note, it
- adds a use insn to this function as well. */
- if (ehc == 0)
- ehc = call_get_eh_context ();
+ rtx *pl, l;
- emit_move_insn (XEXP (reg, 0), ehc);
- insns = get_insns ();
- end_sequence ();
+ for (pl = &exception_handler_labels, l = *pl;
+ XEXP (l, 0) != label;
+ pl = &XEXP (l, 1), l = *pl)
+ continue;
- emit_insns_before (insns, insn);
- }
- }
+ *pl = XEXP (l, 1);
+ free_EXPR_LIST_node (l);
}
-/* Scan the insn chain F and build a list of handler labels. The
- resulting list is placed in the global variable exception_handler_labels. */
+/* Splice REGION from the region tree etc. */
static void
-find_exception_handler_labels_1 (f)
- rtx f;
+remove_eh_handler (region)
+ struct eh_region *region;
{
- rtx insn;
+ struct eh_region **pp, *p;
+ rtx lab;
+ int i;
- /* For each start of a region, add its label to the list. */
+ /* For the benefit of efficiently handling REG_EH_REGION notes,
+ replace this region in the region array with its containing
+ region. Note that previous region deletions may result in
+ multiple copies of this region in the array, so we have to
+ search the whole thing. */
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ if (cfun->eh->region_array[i] == region)
+ cfun->eh->region_array[i] = region->outer;
+
+ if (cfun->eh->built_landing_pads)
+ lab = region->landing_pad;
+ else
+ lab = region->label;
+ if (lab)
+ remove_exception_handler_label (lab);
+
+ if (region->outer)
+ pp = &region->outer->inner;
+ else
+ pp = &cfun->eh->region_tree;
+ for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
+ continue;
- for (insn = f; insn; insn = NEXT_INSN (insn))
+ if (region->inner)
{
- struct handler_info* ptr;
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- ptr = get_first_handler (NOTE_EH_HANDLER (insn));
- for ( ; ptr; ptr = ptr->next)
- {
- /* make sure label isn't in the list already */
- rtx x;
- for (x = exception_handler_labels; x; x = XEXP (x, 1))
- if (XEXP (x, 0) == ptr->handler_label)
- break;
- if (! x)
- exception_handler_labels = gen_rtx_EXPR_LIST (VOIDmode,
- ptr->handler_label, exception_handler_labels);
- }
- }
- else if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- find_exception_handler_labels_1 (XEXP (PATTERN (insn), 0));
- find_exception_handler_labels_1 (XEXP (PATTERN (insn), 1));
- find_exception_handler_labels_1 (XEXP (PATTERN (insn), 2));
- }
+ for (p = region->inner; p->next_peer ; p = p->next_peer)
+ p->outer = region->outer;
+ p->next_peer = region->next_peer;
+ p->outer = region->outer;
+ *pp = region->inner;
}
-}
-
-/* Scan the current insns and build a list of handler labels. The
- resulting list is placed in the global variable exception_handler_labels.
+ else
+ *pp = region->next_peer;
- It is called after the last exception handling region is added to
- the current function (when the rtl is almost all built for the
- current function) and before the jump optimization pass. */
-void
-find_exception_handler_labels ()
-{
- exception_handler_labels = NULL_RTX;
+ if (region->type == ERT_CATCH)
+ {
+ struct eh_region *try, *next, *prev;
- /* If we aren't doing exception handling, there isn't much to check. */
- if (! doing_eh (0))
- return;
+ for (try = region->next_peer;
+ try->type == ERT_CATCH;
+ try = try->next_peer)
+ continue;
+ if (try->type != ERT_TRY)
+ abort ();
- find_exception_handler_labels_1 (get_insns ());
-}
+ next = region->u.catch.next_catch;
+ prev = region->u.catch.prev_catch;
-/* Return a value of 1 if the parameter label number is an exception handler
- label. Return 0 otherwise. */
+ if (next)
+ next->u.catch.prev_catch = prev;
+ else
+ try->u.try.last_catch = prev;
+ if (prev)
+ prev->u.catch.next_catch = next;
+ else
+ {
+ try->u.try.catch = next;
+ if (! next)
+ remove_eh_handler (try);
+ }
+ }
-int
-is_exception_handler_label (lab)
- int lab;
-{
- rtx x;
- for (x = exception_handler_labels ; x ; x = XEXP (x, 1))
- if (lab == CODE_LABEL_NUMBER (XEXP (x, 0)))
- return 1;
- return 0;
+ free (region);
}
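The region tree uses the usual first-child (inner) / next-peer representation, so deleting a node means re-parenting its children and splicing them into its place in the parent's peer list. The same splice, isolated from the region-array and catch-list bookkeeping and written over a minimal hypothetical node type, looks like this:

/* Minimal first-child/next-peer node mirroring the outer/inner/next_peer
   links of struct eh_region.  */
struct tree_node
{
  struct tree_node *outer;	/* parent */
  struct tree_node *inner;	/* first child */
  struct tree_node *next_peer;	/* next sibling */
};

/* Splice NODE out of the tree rooted at *ROOT, promoting its children
   into its position among its siblings.  */
static void
splice_out (struct tree_node **root, struct tree_node *node)
{
  struct tree_node **pp, *p;

  /* Find the link that currently points at NODE.  */
  pp = node->outer ? &node->outer->inner : root;
  while (*pp != node)
    pp = &(*pp)->next_peer;

  if (node->inner)
    {
      /* Re-parent every child, then hook the child list in where NODE
	 used to sit.  */
      for (p = node->inner; p->next_peer; p = p->next_peer)
	p->outer = node->outer;
      p->outer = node->outer;
      p->next_peer = node->next_peer;
      *pp = node->inner;
    }
  else
    *pp = node->next_peer;
}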
-/* Perform sanity checking on the exception_handler_labels list.
-
- Can be called after find_exception_handler_labels is called to
- build the list of exception handlers for the current function and
- before we finish processing the current function. */
+/* LABEL heads a basic block that is about to be deleted. If this
+ label corresponds to an exception region, we may be able to
+ delete the region. */
void
-check_exception_handler_labels ()
+maybe_remove_eh_handler (label)
+ rtx label;
{
- rtx insn, insn2;
+ int i;
- /* If we aren't doing exception handling, there isn't much to check. */
- if (! doing_eh (0))
+ /* ??? After generating landing pads, it's not so simple to determine
+ if the region data is completely unused. One must examine the
+ landing pad and the post landing pad, and whether an inner try block
+ is referencing the catch handlers directly. */
+ if (cfun->eh->built_landing_pads)
return;
- /* Make sure there is no more than 1 copy of a label */
- for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
- {
- int count = 0;
- for (insn2 = exception_handler_labels; insn2; insn2 = XEXP (insn2, 1))
- if (XEXP (insn, 0) == XEXP (insn2, 0))
- count++;
- if (count != 1)
- warning ("Counted %d copies of EH region %d in list.\n", count,
- CODE_LABEL_NUMBER (insn));
- }
-
-}
-
-/* Mark the children of NODE for GC. */
-
-static void
-mark_eh_node (node)
- struct eh_node *node;
-{
- while (node)
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- if (node->entry)
+ struct eh_region *region = cfun->eh->region_array[i];
+ if (region && region->label == label)
{
- ggc_mark_rtx (node->entry->outer_context);
- ggc_mark_rtx (node->entry->exception_handler_label);
- ggc_mark_tree (node->entry->finalization);
- ggc_mark_rtx (node->entry->false_label);
- ggc_mark_rtx (node->entry->rethrow_label);
+ /* Flow will want to remove MUST_NOT_THROW regions as unreachable
+ because there is no path to the fallback call to terminate.
+ But the region continues to affect call-site data until there
+ are no more contained calls, which we don't see here. */
+ if (region->type == ERT_MUST_NOT_THROW)
+ {
+ remove_exception_handler_label (region->label);
+ region->label = NULL_RTX;
+ }
+ else
+ remove_eh_handler (region);
+ break;
}
- node = node ->chain;
}
}
-/* Mark S for GC. */
+
+/* This section describes CFG exception edges for flow. */
-static void
-mark_eh_stack (s)
- struct eh_stack *s;
+/* For communicating between calls to reachable_next_level. */
+struct reachable_info
{
- if (s)
- mark_eh_node (s->top);
-}
+ tree types_caught;
+ tree types_allowed;
+ rtx handlers;
+};
-/* Mark Q for GC. */
+/* A subroutine of reachable_next_level. Return true if TYPE, or a
+ base class of TYPE, is in HANDLED. */
-static void
-mark_eh_queue (q)
- struct eh_queue *q;
+static int
+check_handled (handled, type)
+ tree handled, type;
{
- while (q)
+ tree t;
+
+ /* We can check for exact matches without front-end help. */
+ if (! lang_eh_type_covers)
{
- mark_eh_node (q->head);
- q = q->next;
+ for (t = handled; t ; t = TREE_CHAIN (t))
+ if (TREE_VALUE (t) == type)
+ return 1;
}
+ else
+ {
+ for (t = handled; t ; t = TREE_CHAIN (t))
+ if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
+ return 1;
+ }
+
+ return 0;
}
-/* Mark NODE for GC. A label_node contains a union containing either
- a tree or an rtx. This label_node will contain a tree. */
+/* A subroutine of reachable_next_level. If we are collecting a list
+ of handlers, add one. After landing pad generation, reference
+ it instead of the handlers themselves. Further, the handlers are
+ all wired together, so by referencing one, we've got them all.
+ Before landing pad generation we reference each handler individually.
+
+ LP_REGION contains the landing pad; REGION is the handler. */
static void
-mark_tree_label_node (node)
- struct label_node *node;
+add_reachable_handler (info, lp_region, region)
+ struct reachable_info *info;
+ struct eh_region *lp_region;
+ struct eh_region *region;
{
- while (node)
+ if (! info)
+ return;
+
+ if (cfun->eh->built_landing_pads)
{
- ggc_mark_tree (node->u.tlabel);
- node = node->chain;
+ if (! info->handlers)
+ info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
}
+ else
+ info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
-/* Mark EH for GC. */
+/* Process one level of exception regions for reachability.
+ If TYPE_THROWN is non-null, then it is the *exact* type being
+ propagated. If INFO is non-null, then collect handler labels
+ and caught/allowed type information between invocations. */
-void
-mark_eh_status (eh)
- struct eh_status *eh;
+static enum reachable_code
+reachable_next_level (region, type_thrown, info)
+ struct eh_region *region;
+ tree type_thrown;
+ struct reachable_info *info;
{
- if (eh == 0)
- return;
+ switch (region->type)
+ {
+ case ERT_CLEANUP:
+ /* Before landing-pad generation, we model control flow
+ directly to the individual handlers. In this way we can
+ see that catch handler types may shadow one another. */
+ add_reachable_handler (info, region, region);
+ return RNL_MAYBE_CAUGHT;
+
+ case ERT_TRY:
+ {
+ struct eh_region *c;
+ enum reachable_code ret = RNL_NOT_CAUGHT;
- mark_eh_stack (&eh->x_ehstack);
- mark_eh_stack (&eh->x_catchstack);
- mark_eh_queue (eh->x_ehqueue);
- ggc_mark_rtx (eh->x_catch_clauses);
+ for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
+ {
+ /* A catch-all handler ends the search. */
+ /* ??? _Unwind_ForcedUnwind will want outer cleanups
+ to be run as well. */
+ if (c->u.catch.type == NULL)
+ {
+ add_reachable_handler (info, region, c);
+ return RNL_CAUGHT;
+ }
+
+ if (type_thrown)
+ {
+ /* If we have a type match, end the search. */
+ if (c->u.catch.type == type_thrown
+ || (lang_eh_type_covers
+ && (*lang_eh_type_covers) (c->u.catch.type,
+ type_thrown)))
+ {
+ add_reachable_handler (info, region, c);
+ return RNL_CAUGHT;
+ }
+
+ /* If we have definitive information of a match failure,
+ the catch won't trigger. */
+ if (lang_eh_type_covers)
+ return RNL_NOT_CAUGHT;
+ }
+
+ if (! info)
+ ret = RNL_MAYBE_CAUGHT;
+
+ /* A type must not have been previously caught. */
+ else if (! check_handled (info->types_caught, c->u.catch.type))
+ {
+ add_reachable_handler (info, region, c);
+ info->types_caught = tree_cons (NULL, c->u.catch.type,
+ info->types_caught);
+
+ /* ??? If the catch type is a base class of every allowed
+ type, then we know we can stop the search. */
+ ret = RNL_MAYBE_CAUGHT;
+ }
+ }
- if (lang_mark_false_label_stack)
- (*lang_mark_false_label_stack) (eh->x_false_label_stack);
- mark_tree_label_node (eh->x_caught_return_label_stack);
+ return ret;
+ }
- ggc_mark_tree (eh->x_protect_list);
- ggc_mark_rtx (eh->ehc);
- ggc_mark_rtx (eh->x_eh_return_stub_label);
-}
+ case ERT_ALLOWED_EXCEPTIONS:
+ /* An empty list of types definitely ends the search. */
+ if (region->u.allowed.type_list == NULL_TREE)
+ {
+ add_reachable_handler (info, region, region);
+ return RNL_CAUGHT;
+ }
-/* Mark ARG (which is really a struct func_eh_entry**) for GC. */
+ /* Collect a list of lists of allowed types for use in detecting
+ when a catch may be transformed into a catch-all. */
+ if (info)
+ info->types_allowed = tree_cons (NULL_TREE,
+ region->u.allowed.type_list,
+ info->types_allowed);
+
+      /* If we have definitive information about the type hierarchy,
+ then we can tell if the thrown type will pass through the
+ filter. */
+ if (type_thrown && lang_eh_type_covers)
+ {
+ if (check_handled (region->u.allowed.type_list, type_thrown))
+ return RNL_NOT_CAUGHT;
+ else
+ {
+ add_reachable_handler (info, region, region);
+ return RNL_CAUGHT;
+ }
+ }
-static void
-mark_func_eh_entry (arg)
- void *arg;
-{
- struct func_eh_entry *fee;
- struct handler_info *h;
- int i;
+ add_reachable_handler (info, region, region);
+ return RNL_MAYBE_CAUGHT;
- fee = *((struct func_eh_entry **) arg);
+ case ERT_CATCH:
+      /* Catch regions are handled by their controlling try region. */
+ return RNL_NOT_CAUGHT;
- for (i = 0; i < current_func_eh_entry; ++i)
- {
- ggc_mark_rtx (fee->rethrow_label);
- for (h = fee->handlers; h; h = h->next)
+ case ERT_MUST_NOT_THROW:
+ /* Here we end our search, since no exceptions may propagate.
+	 If we've touched down at some landing pad previously, then the
+ explicit function call we generated may be used. Otherwise
+ the call is made by the runtime. */
+ if (info && info->handlers)
{
- ggc_mark_rtx (h->handler_label);
- if (h->type_info != CATCH_ALL_TYPE)
- ggc_mark_tree ((tree) h->type_info);
+ add_reachable_handler (info, region, region);
+ return RNL_CAUGHT;
}
+ else
+ return RNL_BLOCKED;
- /* Skip to the next entry in the array. */
- ++fee;
+ case ERT_THROW:
+ case ERT_FIXUP:
+ /* Shouldn't see these here. */
+ break;
}
-}
-/* This group of functions initializes the exception handling data
- structures at the start of the compilation, initializes the data
- structures at the start of a function, and saves and restores the
- exception handling data structures for the start/end of a nested
- function. */
-
-/* Toplevel initialization for EH things. */
-
-void
-init_eh ()
-{
- first_rethrow_symbol = create_rethrow_ref (0);
- final_rethrow = gen_exception_label ();
- last_rethrow_symbol = create_rethrow_ref (CODE_LABEL_NUMBER (final_rethrow));
-
- ggc_add_rtx_root (&exception_handler_labels, 1);
- ggc_add_rtx_root (&eh_return_context, 1);
- ggc_add_rtx_root (&eh_return_stack_adjust, 1);
- ggc_add_rtx_root (&eh_return_handler, 1);
- ggc_add_rtx_root (&first_rethrow_symbol, 1);
- ggc_add_rtx_root (&final_rethrow, 1);
- ggc_add_rtx_root (&last_rethrow_symbol, 1);
- ggc_add_root (&function_eh_regions, 1, sizeof (function_eh_regions),
- mark_func_eh_entry);
+ abort ();
}
-
-/* Initialize the per-function EH information. */
-void
-init_eh_for_function ()
-{
- cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
- ehqueue = (struct eh_queue *) xcalloc (1, sizeof (struct eh_queue));
- eh_return_context = NULL_RTX;
- eh_return_stack_adjust = NULL_RTX;
- eh_return_handler = NULL_RTX;
-}
+/* Retrieve a list of labels of exception handlers which can be
+ reached by a given insn. */
-void
-free_eh_status (f)
- struct function *f;
-{
- free (f->eh->x_ehqueue);
- free (f->eh);
- f->eh = NULL;
-}
-
-/* This section is for the exception handling specific optimization
- pass. */
-
-/* Determine if the given INSN can throw an exception. */
-
-int
-can_throw (insn)
+rtx
+reachable_handlers (insn)
rtx insn;
{
- if (GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ struct reachable_info info;
+ struct eh_region *region;
+ tree type_thrown;
+ int region_number;
- /* Calls can always potentially throw exceptions, unless they have
- a REG_EH_REGION note with a value of 0 or less. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) == RESX)
+ region_number = XINT (PATTERN (insn), 0);
+ else
{
rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) > 0)
- return 1;
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return NULL;
+ region_number = INTVAL (XEXP (note, 0));
}
- if (asynchronous_exceptions)
+ memset (&info, 0, sizeof (info));
+
+ region = cfun->eh->region_array[region_number];
+
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
{
- /* If we wanted asynchronous exceptions, then everything but NOTEs
- and CODE_LABELs could throw. */
- if (GET_CODE (insn) != NOTE && GET_CODE (insn) != CODE_LABEL)
- return 1;
+ type_thrown = region->u.throw.type;
+ region = region->outer;
}
+ else if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) == RESX)
+ region = region->outer;
- return 0;
+ for (; region; region = region->outer)
+ if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
+ break;
+
+ return info.handlers;
}
-/* Return nonzero if nothing in this function can throw. */
+/* Determine if the given INSN can throw an exception that is caught
+ within the function. */
-int
-nothrow_function_p ()
+bool
+can_throw_internal (insn)
+ rtx insn;
{
- rtx insn;
+ struct eh_region *region;
+ tree type_thrown;
+ rtx note;
- if (! flag_exceptions)
- return 1;
+ if (! INSN_P (insn))
+ return false;
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (can_throw (insn))
- return 0;
- for (insn = current_function_epilogue_delay_list; insn;
- insn = XEXP (insn, 1))
- if (can_throw (insn))
- return 0;
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
- return 1;
-}
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ int i;
+ for (i = 0; i < 3; ++i)
+ {
+ rtx sub = XEXP (PATTERN (insn), i);
+ for (; sub ; sub = NEXT_INSN (sub))
+ if (can_throw_internal (sub))
+ return true;
+ }
+ return false;
+ }
-/* Scan a exception region looking for the matching end and then
- remove it if possible. INSN is the start of the region, N is the
- region number, and DELETE_OUTER is to note if anything in this
- region can throw.
+ /* Every insn that might throw has an EH_REGION note. */
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return false;
- Regions are removed if they cannot possibly catch an exception.
- This is determined by invoking can_throw on each insn within the
- region; if can_throw returns true for any of the instructions, the
- region can catch an exception, since there is an insn within the
- region that is capable of throwing an exception.
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
- Returns the NOTE_INSN_EH_REGION_END corresponding to this region, or
- calls abort if it can't find one.
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
+ }
- Can abort if INSN is not a NOTE_INSN_EH_REGION_BEGIN, or if N doesn't
- correspond to the region number, or if DELETE_OUTER is NULL. */
+ /* If this exception is ignored by each and every containing region,
+ then control passes straight out. The runtime may handle some
+ regions, which also do not require processing internally. */
+ for (; region; region = region->outer)
+ {
+ enum reachable_code how = reachable_next_level (region, type_thrown, 0);
+ if (how == RNL_BLOCKED)
+ return false;
+ if (how != RNL_NOT_CAUGHT)
+ return true;
+ }
-static rtx
-scan_region (insn, n, delete_outer)
- rtx insn;
- int n;
- int *delete_outer;
-{
- rtx start = insn;
+ return false;
+}
- /* Assume we can delete the region. */
- int delete = 1;
+/* Determine if the given INSN can throw an exception that is
+ visible outside the function. */
- /* Can't delete something which is rethrown from. */
- if (rethrow_used (n))
- delete = 0;
+bool
+can_throw_external (insn)
+ rtx insn;
+{
+ struct eh_region *region;
+ tree type_thrown;
+ rtx note;
- if (insn == NULL_RTX
- || GET_CODE (insn) != NOTE
- || NOTE_LINE_NUMBER (insn) != NOTE_INSN_EH_REGION_BEG
- || NOTE_EH_HANDLER (insn) != n
- || delete_outer == NULL)
- abort ();
+ if (! INSN_P (insn))
+ return false;
- insn = NEXT_INSN (insn);
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
- /* Look for the matching end. */
- while (! (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
{
- /* If anything can throw, we can't remove the region. */
- if (delete && can_throw (insn))
+ int i;
+ for (i = 0; i < 3; ++i)
{
- delete = 0;
+ rtx sub = XEXP (PATTERN (insn), i);
+ for (; sub ; sub = NEXT_INSN (sub))
+ if (can_throw_external (sub))
+ return true;
}
-
- /* Watch out for and handle nested regions. */
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- insn = scan_region (insn, NOTE_EH_HANDLER (insn), &delete);
- }
-
- insn = NEXT_INSN (insn);
+ return false;
}
- /* The _BEG/_END NOTEs must match and nest. */
- if (NOTE_EH_HANDLER (insn) != n)
- abort ();
-
- /* If anything in this exception region can throw, we can throw. */
- if (! delete)
- *delete_outer = 0;
- else
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
{
- /* Delete the start and end of the region. */
- delete_insn (start);
- delete_insn (insn);
-
-/* We no longer removed labels here, since flow will now remove any
- handler which cannot be called any more. */
-
-#if 0
- /* Only do this part if we have built the exception handler
- labels. */
- if (exception_handler_labels)
- {
- rtx x, *prev = &exception_handler_labels;
-
- /* Find it in the list of handlers. */
- for (x = exception_handler_labels; x; x = XEXP (x, 1))
- {
- rtx label = XEXP (x, 0);
- if (CODE_LABEL_NUMBER (label) == n)
- {
- /* If we are the last reference to the handler,
- delete it. */
- if (--LABEL_NUSES (label) == 0)
- delete_insn (label);
-
- if (optimize)
- {
- /* Remove it from the list of exception handler
- labels, if we are optimizing. If we are not, then
- leave it in the list, as we are not really going to
- remove the region. */
- *prev = XEXP (x, 1);
- XEXP (x, 1) = 0;
- XEXP (x, 0) = 0;
- }
-
- break;
- }
- prev = &XEXP (x, 1);
- }
- }
-#endif
+ /* Calls (and trapping insns) without notes are outside any
+ exception handling region in this function. We have to
+ assume it might throw. Given that the front end and middle
+ ends mark known NOTHROW functions, this isn't so wildly
+ inaccurate. */
+ return (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn))));
}
- return insn;
-}
-
-/* Perform various interesting optimizations for exception handling
- code.
+ if (INTVAL (XEXP (note, 0)) <= 0)
+ return false;
- We look for empty exception regions and make them go (away). The
- jump optimization code will remove the handler if nothing else uses
- it. */
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
-void
-exception_optimize ()
-{
- rtx insn;
- int n;
-
- /* Remove empty regions. */
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
{
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- /* Since scan_region will return the NOTE_INSN_EH_REGION_END
- insn, we will indirectly skip through all the insns
-	     in between. We are also guaranteed that the value of insn
- returned will be valid, as otherwise scan_region won't
- return. */
- insn = scan_region (insn, NOTE_EH_HANDLER (insn), &n);
- }
+ type_thrown = region->u.throw.type;
+ region = region->outer;
}
+
+ /* If the exception is caught or blocked by any containing region,
+ then it is not seen by any calling function. */
+ for (; region ; region = region->outer)
+ if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
+ return false;
+
+ return true;
}
-/* This function determines whether the rethrow labels for any of the
- exception regions in the current function are used or not, and set
- the reference flag according. */
+/* True if nothing in this function can throw outside this function. */
-void
-update_rethrow_references ()
+bool
+nothrow_function_p ()
{
rtx insn;
- int x, region;
- int *saw_region, *saw_rethrow;
- if (!flag_new_exceptions)
- return;
-
- saw_region = (int *) xcalloc (current_func_eh_entry, sizeof (int));
- saw_rethrow = (int *) xcalloc (current_func_eh_entry, sizeof (int));
+ if (! flag_exceptions)
+ return true;
- /* Determine what regions exist, and whether there are any rethrows
- from those regions or not. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
- {
- rtx note = find_reg_note (insn, REG_EH_RETHROW, NULL_RTX);
- if (note)
- {
- region = eh_region_from_symbol (XEXP (note, 0));
- region = find_func_region (region);
- saw_rethrow[region] = 1;
- }
- }
- else
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- region = find_func_region (NOTE_EH_HANDLER (insn));
- saw_region[region] = 1;
- }
- }
-
- /* For any regions we did see, set the referenced flag. */
- for (x = 0; x < current_func_eh_entry; x++)
- if (saw_region[x])
- function_eh_regions[x].rethrow_ref = saw_rethrow[x];
+ if (can_throw_external (insn))
+ return false;
+ for (insn = current_function_epilogue_delay_list; insn;
+ insn = XEXP (insn, 1))
+ if (can_throw_external (insn))
+ return false;
- /* Clean up. */
- free (saw_region);
- free (saw_rethrow);
+ return true;
}
+
-/* Various hooks for the DWARF 2 __throw routine. */
+/* Various hooks for unwind library. */
/* Do any necessary initialization to access arbitrary stack frames.
On the SPARC, this means flushing the register windows. */
@@ -2950,6 +2919,33 @@ expand_builtin_unwind_init ()
#endif
}
+rtx
+expand_builtin_eh_return_data_regno (arglist)
+ tree arglist;
+{
+ tree which = TREE_VALUE (arglist);
+ unsigned HOST_WIDE_INT iwhich;
+
+ if (TREE_CODE (which) != INTEGER_CST)
+ {
+ error ("argument of `__builtin_eh_return_regno' must be constant");
+ return constm1_rtx;
+ }
+
+ iwhich = tree_low_cst (which, 1);
+ iwhich = EH_RETURN_DATA_REGNO (iwhich);
+ if (iwhich == INVALID_REGNUM)
+ return constm1_rtx;
+
+#ifdef DWARF_FRAME_REGNUM
+ iwhich = DWARF_FRAME_REGNUM (iwhich);
+#else
+ iwhich = DBX_REGISTER_NUMBER (iwhich);
+#endif
+
+ return GEN_INT (iwhich);
+}
+
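For context, __builtin_eh_return_data_regno is the builtin being expanded here; unwind runtimes and personality routines use it to name the hard registers that carry the exception pointer and the handler switch value into a landing pad. The sketch below is a simplified, hedged illustration of that use; the context type and setter are invented so the example stays self-contained, standing in for the real _Unwind_Context and _Unwind_SetGR.

/* Illustrative consumer of the builtin expanded above.  The argument must
   be a compile-time constant; the result is a DWARF register number, or
   -1 if the target provides no such register.  */
struct unwind_context_sketch;
extern void unwind_set_gr_sketch (struct unwind_context_sketch *, int,
				  unsigned long);

static void
install_landing_pad_values (struct unwind_context_sketch *uc,
			    unsigned long exc_ptr, unsigned long switch_value)
{
  unwind_set_gr_sketch (uc, __builtin_eh_return_data_regno (0), exc_ptr);
  unwind_set_gr_sketch (uc, __builtin_eh_return_data_regno (1), switch_value);
}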
/* Given a value extracted from the return address register or stack slot,
return the actual address encoded in that value. */
@@ -2958,7 +2954,18 @@ expand_builtin_extract_return_addr (addr_tree)
tree addr_tree;
{
rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
- return eh_outer_context (addr);
+
+ /* First mask out any unwanted bits. */
+#ifdef MASK_RETURN_ADDR
+ expand_and (addr, MASK_RETURN_ADDR, addr);
+#endif
+
+ /* Then adjust to find the real return address. */
+#if defined (RETURN_ADDR_OFFSET)
+ addr = plus_constant (addr, RETURN_ADDR_OFFSET);
+#endif
+
+ return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
@@ -2970,540 +2977,718 @@ expand_builtin_frob_return_addr (addr_tree)
tree addr_tree;
{
rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+
#ifdef RETURN_ADDR_OFFSET
+ addr = force_reg (Pmode, addr);
addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif
+
return addr;
}
-/* Choose three registers for communication between the main body of
- __throw and the epilogue (or eh stub) and the exception handler.
- We must do this with hard registers because the epilogue itself
- will be generated after reload, at which point we may not reference
- pseudos at all.
-
- The first passes the exception context to the handler. For this
- we use the return value register for a void*.
-
- The second holds the stack pointer value to be restored. For this
- we use the static chain register if it exists, is different from
- the previous, and is call-clobbered; otherwise some arbitrary
- call-clobbered register.
-
- The third holds the address of the handler itself. Here we use
- some arbitrary call-clobbered register. */
+/* Set up the epilogue with the magic bits we'll need to return to the
+ exception handler. */
-static void
-eh_regs (pcontext, psp, pra, outgoing)
- rtx *pcontext, *psp, *pra;
- int outgoing ATTRIBUTE_UNUSED;
+void
+expand_builtin_eh_return (stackadj_tree, handler_tree)
+ tree stackadj_tree, handler_tree;
{
- rtx rcontext, rsp, rra;
- unsigned int i;
+ rtx stackadj, handler;
-#ifdef FUNCTION_OUTGOING_VALUE
- if (outgoing)
- rcontext = FUNCTION_OUTGOING_VALUE (build_pointer_type (void_type_node),
- current_function_decl);
- else
-#endif
- rcontext = FUNCTION_VALUE (build_pointer_type (void_type_node),
- current_function_decl);
+ stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
+ handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
-#ifdef STATIC_CHAIN_REGNUM
- if (outgoing)
- rsp = static_chain_incoming_rtx;
+ if (! cfun->eh->ehr_label)
+ {
+ cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
+ cfun->eh->ehr_handler = copy_to_reg (handler);
+ cfun->eh->ehr_label = gen_label_rtx ();
+ }
else
- rsp = static_chain_rtx;
- if (REGNO (rsp) == REGNO (rcontext)
- || ! call_used_regs [REGNO (rsp)])
-#endif /* STATIC_CHAIN_REGNUM */
- rsp = NULL_RTX;
-
- if (rsp == NULL_RTX)
{
- for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
- if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (rcontext))
- break;
- if (i == FIRST_PSEUDO_REGISTER)
- abort();
-
- rsp = gen_rtx_REG (Pmode, i);
+ if (stackadj != cfun->eh->ehr_stackadj)
+ emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
+ if (handler != cfun->eh->ehr_handler)
+ emit_move_insn (cfun->eh->ehr_handler, handler);
}
- for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
- if (call_used_regs[i] && ! fixed_regs[i]
- && i != REGNO (rcontext) && i != REGNO (rsp))
- break;
- if (i == FIRST_PSEUDO_REGISTER)
- abort();
-
- rra = gen_rtx_REG (Pmode, i);
-
- *pcontext = rcontext;
- *psp = rsp;
- *pra = rra;
-}
-
-/* Retrieve the register which contains the pointer to the eh_context
- structure set the __throw. */
-
-#if 0
-rtx
-get_reg_for_handler ()
-{
- rtx reg1;
- reg1 = FUNCTION_VALUE (build_pointer_type (void_type_node),
- current_function_decl);
- return reg1;
-}
-#endif
-
-/* Set up the epilogue with the magic bits we'll need to return to the
- exception handler. */
-
-void
-expand_builtin_eh_return (context, stack, handler)
- tree context, stack, handler;
-{
- if (eh_return_context)
- error("Duplicate call to __builtin_eh_return");
-
- eh_return_context
- = copy_to_reg (expand_expr (context, NULL_RTX, VOIDmode, 0));
- eh_return_stack_adjust
- = copy_to_reg (expand_expr (stack, NULL_RTX, VOIDmode, 0));
- eh_return_handler
- = copy_to_reg (expand_expr (handler, NULL_RTX, VOIDmode, 0));
+ emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return ()
{
- rtx reg1, reg2, reg3;
- rtx stub_start, after_stub;
- rtx ra, tmp;
+ rtx sa, ra, around_label;
- if (!eh_return_context)
+ if (! cfun->eh->ehr_label)
return;
- current_function_cannot_inline = N_("function uses __builtin_eh_return");
-
- eh_regs (&reg1, &reg2, &reg3, 1);
-#ifdef POINTERS_EXTEND_UNSIGNED
- eh_return_context = convert_memory_address (Pmode, eh_return_context);
- eh_return_stack_adjust =
- convert_memory_address (Pmode, eh_return_stack_adjust);
- eh_return_handler = convert_memory_address (Pmode, eh_return_handler);
-#endif
- emit_move_insn (reg1, eh_return_context);
- emit_move_insn (reg2, eh_return_stack_adjust);
- emit_move_insn (reg3, eh_return_handler);
-
- /* Talk directly to the target's epilogue code when possible. */
-
-#ifdef HAVE_eh_epilogue
- if (HAVE_eh_epilogue)
+ sa = EH_RETURN_STACKADJ_RTX;
+ if (! sa)
{
- emit_insn (gen_eh_epilogue (reg1, reg2, reg3));
+ error ("__builtin_eh_return not supported on this target");
return;
}
-#endif
- /* Otherwise, use the same stub technique we had before. */
+ current_function_calls_eh_return = 1;
- eh_return_stub_label = stub_start = gen_label_rtx ();
- after_stub = gen_label_rtx ();
+ around_label = gen_label_rtx ();
+ emit_move_insn (sa, const0_rtx);
+ emit_jump (around_label);
- /* Set the return address to the stub label. */
+ emit_label (cfun->eh->ehr_label);
+ clobber_return_register ();
- ra = expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
- 0, hard_frame_pointer_rtx);
- if (GET_CODE (ra) == REG && REGNO (ra) >= FIRST_PSEUDO_REGISTER)
- abort();
-
- tmp = memory_address (Pmode, gen_rtx_LABEL_REF (Pmode, stub_start));
-#ifdef RETURN_ADDR_OFFSET
- tmp = plus_constant (tmp, -RETURN_ADDR_OFFSET);
+#ifdef HAVE_eh_return
+ if (HAVE_eh_return)
+ emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
+ else
#endif
- tmp = force_operand (tmp, ra);
- if (tmp != ra)
- emit_move_insn (ra, tmp);
-
- /* Indicate that the registers are in fact used. */
- emit_insn (gen_rtx_USE (VOIDmode, reg1));
- emit_insn (gen_rtx_USE (VOIDmode, reg2));
- emit_insn (gen_rtx_USE (VOIDmode, reg3));
- if (GET_CODE (ra) == REG)
- emit_insn (gen_rtx_USE (VOIDmode, ra));
+ {
+ rtx handler;
- /* Generate the stub. */
+ ra = EH_RETURN_HANDLER_RTX;
+ if (! ra)
+ {
+ error ("__builtin_eh_return not supported on this target");
+ ra = gen_reg_rtx (Pmode);
+ }
- emit_jump (after_stub);
- emit_label (stub_start);
+ emit_move_insn (sa, cfun->eh->ehr_stackadj);
- eh_regs (&reg1, &reg2, &reg3, 0);
- adjust_stack (reg2);
- emit_indirect_jump (reg3);
+ handler = cfun->eh->ehr_handler;
+ if (GET_MODE (ra) != Pmode)
+ {
+#ifdef POINTERS_EXTEND_UNSIGNED
+ handler = convert_memory_address (GET_MODE (ra), handler);
+#else
+ handler = convert_to_mode (GET_MODE (ra), handler, 0);
+#endif
+ }
+ emit_move_insn (ra, handler);
+ }
- emit_label (after_stub);
+ emit_label (around_label);
}
+struct action_record
+{
+ int offset;
+ int filter;
+ int next;
+};
-/* This contains the code required to verify whether arbitrary instructions
- are in the same exception region. */
+static int
+action_record_eq (pentry, pdata)
+ const PTR pentry;
+ const PTR pdata;
+{
+ const struct action_record *entry = (const struct action_record *) pentry;
+ const struct action_record *data = (const struct action_record *) pdata;
+ return entry->filter == data->filter && entry->next == data->next;
+}
-static int *insn_eh_region = (int *)0;
-static int maximum_uid;
+static hashval_t
+action_record_hash (pentry)
+ const PTR pentry;
+{
+ const struct action_record *entry = (const struct action_record *) pentry;
+ return entry->next * 1009 + entry->filter;
+}
-static void
-set_insn_eh_region (first, region_num)
- rtx *first;
- int region_num;
+static int
+add_action_record (ar_hash, filter, next)
+ htab_t ar_hash;
+ int filter, next;
{
- rtx insn;
- int rnum;
+ struct action_record **slot, *new, tmp;
- for (insn = *first; insn; insn = NEXT_INSN (insn))
+ tmp.filter = filter;
+ tmp.next = next;
+ slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
+
+ if ((new = *slot) == NULL)
{
- if ((GET_CODE (insn) == NOTE)
- && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG))
- {
- rnum = NOTE_EH_HANDLER (insn);
- insn_eh_region[INSN_UID (insn)] = rnum;
- insn = NEXT_INSN (insn);
- set_insn_eh_region (&insn, rnum);
- /* Upon return, insn points to the EH_REGION_END of nested region */
- continue;
- }
- insn_eh_region[INSN_UID (insn)] = region_num;
- if ((GET_CODE (insn) == NOTE) &&
- (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
- break;
+ new = (struct action_record *) xmalloc (sizeof (*new));
+ new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
+ new->filter = filter;
+ new->next = next;
+ *slot = new;
+
+ /* The filter value goes in untouched. The link to the next
+ record is a "self-relative" byte offset, or zero to indicate
+	 that there is no next record.  So convert the absolute 1-based
+	 indices we've been carrying around into a displacement. */
+
+ push_sleb128 (&cfun->eh->action_record_data, filter);
+ if (next)
+ next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
+ push_sleb128 (&cfun->eh->action_record_data, next);
}
- *first = insn;
-}
-/* Free the insn table, and make sure it cannot be used again. */
+ return new->offset;
+}
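The comment above is the heart of the action-table encoding: each record is a (filter, next) pair emitted as two signed LEB128 values, and the stored next link is rewritten from an absolute 1-based record offset into a self-relative displacement, with zero meaning end of chain. A freestanding sketch over a plain byte buffer follows; the names are hypothetical, the buffer size is fixed for brevity, and an arithmetic right shift is assumed for negative values.

#include <stddef.h>

static unsigned char action_buf[1024];
static size_t action_len;		/* bytes used so far */

/* Signed LEB128, the encoding used for action table entries.
   Assumes value >> 7 is an arithmetic shift for negative values.  */
static void
push_sleb128_sketch (long value)
{
  int more;

  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      action_buf[action_len++] = byte;
    }
  while (more);
}

/* Append one action record and return its 1-based offset.  As in
   add_action_record, the filter goes in untouched and the next link
   becomes a self-relative displacement (0 = no next record).  */
static int
push_action_record_sketch (int filter, int next)
{
  int offset = (int) action_len + 1;

  push_sleb128_sketch (filter);
  if (next)
    next -= (int) action_len + 1;	/* absolute 1-based -> displacement */
  push_sleb128_sketch (next);

  return offset;
}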
-void
-free_insn_eh_region ()
+static int
+collect_one_action_chain (ar_hash, region)
+ htab_t ar_hash;
+ struct eh_region *region;
{
- if (!doing_eh (0))
- return;
+ struct eh_region *c;
+ int next;
- if (insn_eh_region)
+ /* If we've reached the top of the region chain, then we have
+ no actions, and require no landing pad. */
+ if (region == NULL)
+ return -1;
+
+ switch (region->type)
{
- free (insn_eh_region);
- insn_eh_region = (int *)0;
+ case ERT_CLEANUP:
+ /* A cleanup adds a zero filter to the beginning of the chain, but
+ there are special cases to look out for. If there are *only*
+ cleanups along a path, then it compresses to a zero action.
+ Further, if there are multiple cleanups along a path, we only
+ need to represent one of them, as that is enough to trigger
+ entry to the landing pad at runtime. */
+ next = collect_one_action_chain (ar_hash, region->outer);
+ if (next <= 0)
+ return 0;
+ for (c = region->outer; c ; c = c->outer)
+ if (c->type == ERT_CLEANUP)
+ return next;
+ return add_action_record (ar_hash, 0, next);
+
+ case ERT_TRY:
+ /* Process the associated catch regions in reverse order.
+ If there's a catch-all handler, then we don't need to
+ search outer regions. Use a magic -3 value to record
+	 that we haven't done the outer search. */
+ next = -3;
+ for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
+ {
+ if (c->u.catch.type == NULL)
+ next = add_action_record (ar_hash, c->u.catch.filter, 0);
+ else
+ {
+ if (next == -3)
+ {
+ next = collect_one_action_chain (ar_hash, region->outer);
+ if (next < 0)
+ next = 0;
+ }
+ next = add_action_record (ar_hash, c->u.catch.filter, next);
+ }
+ }
+ return next;
+
+ case ERT_ALLOWED_EXCEPTIONS:
+ /* An exception specification adds its filter to the
+ beginning of the chain. */
+ next = collect_one_action_chain (ar_hash, region->outer);
+ return add_action_record (ar_hash, region->u.allowed.filter,
+ next < 0 ? 0 : next);
+
+ case ERT_MUST_NOT_THROW:
+ /* A must-not-throw region with no inner handlers or cleanups
+ requires no call-site entry. Note that this differs from
+ the no handler or cleanup case in that we do require an lsda
+ to be generated. Return a magic -2 value to record this. */
+ return -2;
+
+ case ERT_CATCH:
+ case ERT_THROW:
+ /* CATCH regions are handled in TRY above. THROW regions are
+ for optimization information only and produce no output. */
+ return collect_one_action_chain (ar_hash, region->outer);
+
+ default:
+ abort ();
}
}
-/* Initialize the table. max_uid must be calculated and handed into
- this routine. If it is unavailable, passing a value of 0 will
- cause this routine to calculate it as well. */
-
-void
-init_insn_eh_region (first, max_uid)
- rtx first;
- int max_uid;
+static int
+add_call_site (landing_pad, action)
+ rtx landing_pad;
+ int action;
{
- rtx insn;
+ struct call_site_record *data = cfun->eh->call_site_data;
+ int used = cfun->eh->call_site_data_used;
+ int size = cfun->eh->call_site_data_size;
- if (!doing_eh (0))
- return;
+ if (used >= size)
+ {
+ size = (size ? size * 2 : 64);
+ data = (struct call_site_record *)
+ xrealloc (data, sizeof (*data) * size);
+ cfun->eh->call_site_data = data;
+ cfun->eh->call_site_data_size = size;
+ }
- if (insn_eh_region)
- free_insn_eh_region();
+ data[used].landing_pad = landing_pad;
+ data[used].action = action;
- if (max_uid == 0)
- for (insn = first; insn; insn = NEXT_INSN (insn))
- if (INSN_UID (insn) > max_uid) /* find largest UID */
- max_uid = INSN_UID (insn);
+ cfun->eh->call_site_data_used = used + 1;
- maximum_uid = max_uid;
- insn_eh_region = (int *) xmalloc ((max_uid + 1) * sizeof (int));
- insn = first;
- set_insn_eh_region (&insn, 0);
+ return used + call_site_base;
}
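The call-site table itself simply grows by doubling (starting at 64 entries) and returns the new entry's index biased by call_site_base. A trivial hypothetical sketch of that append policy, detached from struct function:

#include <stdlib.h>

/* Hypothetical growable record array using the same doubling policy.  */
struct cs_record { void *landing_pad; int action; };

static struct cs_record *cs_data;
static int cs_used, cs_size;

static int
append_call_site_sketch (void *landing_pad, int action)
{
  if (cs_used >= cs_size)
    {
      cs_size = cs_size ? cs_size * 2 : 64;
      cs_data = (struct cs_record *) realloc (cs_data,
					      sizeof (*cs_data) * cs_size);
    }
  cs_data[cs_used].landing_pad = landing_pad;
  cs_data[cs_used].action = action;
  return cs_used++;
}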
+/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
+ The new note numbers will not refer to region numbers, but
+ instead to call site entries. */
-/* Check whether 2 instructions are within the same region. */
-
-int
-in_same_eh_region (insn1, insn2)
- rtx insn1, insn2;
+void
+convert_to_eh_region_ranges ()
{
- int ret, uid1, uid2;
+ rtx insn, iter, note;
+ htab_t ar_hash;
+ int last_action = -3;
+ rtx last_action_insn = NULL_RTX;
+ rtx last_landing_pad = NULL_RTX;
+ rtx first_no_action_insn = NULL_RTX;
+ int call_site;
- /* If no exceptions, instructions are always in same region. */
- if (!doing_eh (0))
- return 1;
+ if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
+ return;
- /* If the table isn't allocated, assume the worst. */
- if (!insn_eh_region)
- return 0;
+ VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
- uid1 = INSN_UID (insn1);
- uid2 = INSN_UID (insn2);
+ ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
- /* if instructions have been allocated beyond the end, either
- the table is out of date, or this is a late addition, or
- something... Assume the worst. */
- if (uid1 > maximum_uid || uid2 > maximum_uid)
- return 0;
+ for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
+ if (INSN_P (iter))
+ {
+ struct eh_region *region;
+ int this_action;
+ rtx this_landing_pad;
- ret = (insn_eh_region[uid1] == insn_eh_region[uid2]);
- return ret;
-}
-
+ insn = iter;
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
-/* This function will initialize the handler list for a specified block.
- It may recursively call itself if the outer block hasn't been processed
- yet. At some point in the future we can trim out handlers which we
- know cannot be called. (ie, if a block has an INT type handler,
- control will never be passed to an outer INT type handler). */
-
-static void
-process_nestinfo (block, info, nested_eh_region)
- int block;
- eh_nesting_info *info;
- int *nested_eh_region;
-{
- handler_info *ptr, *last_ptr = NULL;
- int x, y, count = 0;
- int extra = 0;
- handler_info **extra_handlers = 0;
- int index = info->region_index[block];
-
- /* If we've already processed this block, simply return. */
- if (info->num_handlers[index] > 0)
- return;
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
+ {
+ if (! (GET_CODE (insn) == CALL_INSN
+ || (flag_non_call_exceptions
+ && may_trap_p (PATTERN (insn)))))
+ continue;
+ this_action = -1;
+ region = NULL;
+ }
+ else
+ {
+ if (INTVAL (XEXP (note, 0)) <= 0)
+ continue;
+ region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ this_action = collect_one_action_chain (ar_hash, region);
+ }
+
+	  /* The existence of catch handlers or must-not-throw regions
+	     implies that an lsda is needed (even if empty).  */
+ if (this_action != -1)
+ cfun->uses_eh_lsda = 1;
+
+ /* Delay creation of region notes for no-action regions
+ until we're sure that an lsda will be required. */
+ else if (last_action == -3)
+ {
+ first_no_action_insn = iter;
+ last_action = -1;
+ }
- for (ptr = get_first_handler (block); ptr; last_ptr = ptr, ptr = ptr->next)
- count++;
+ /* Cleanups and handlers may share action chains but not
+ landing pads. Collect the landing pad for this region. */
+ if (this_action >= 0)
+ {
+ struct eh_region *o;
+ for (o = region; ! o->landing_pad ; o = o->outer)
+ continue;
+ this_landing_pad = o->landing_pad;
+ }
+ else
+ this_landing_pad = NULL_RTX;
- /* pick up any information from the next outer region. It will already
- contain a summary of itself and all outer regions to it. */
+	  /* Differing actions or landing pads imply a change in the
+	     call-site info, which means some EH_REGION note should be
+	     emitted.  */
+ if (last_action != this_action
+ || last_landing_pad != this_landing_pad)
+ {
+ /* If we'd not seen a previous action (-3) or the previous
+ action was must-not-throw (-2), then we do not need an
+ end note. */
+ if (last_action >= -1)
+ {
+ /* If we delayed the creation of the begin, do it now. */
+ if (first_no_action_insn)
+ {
+ call_site = add_call_site (NULL_RTX, 0);
+ note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
+ first_no_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ first_no_action_insn = NULL_RTX;
+ }
+
+ note = emit_note_after (NOTE_INSN_EH_REGION_END,
+ last_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
+
+ /* If the new action is must-not-throw, then no region notes
+ are created. */
+ if (this_action >= -1)
+ {
+ call_site = add_call_site (this_landing_pad,
+ this_action < 0 ? 0 : this_action);
+ note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
+
+ last_action = this_action;
+ last_landing_pad = this_landing_pad;
+ }
+ last_action_insn = iter;
+ }
- if (nested_eh_region [block] != 0)
+ if (last_action >= -1 && ! first_no_action_insn)
{
- int nested_index = info->region_index[nested_eh_region[block]];
- process_nestinfo (nested_eh_region[block], info, nested_eh_region);
- extra = info->num_handlers[nested_index];
- extra_handlers = info->handlers[nested_index];
- info->outer_index[index] = nested_index;
+ note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
}
- /* If the last handler is either a CATCH_ALL or a cleanup, then we
- won't use the outer ones since we know control will not go past the
- catch-all or cleanup. */
+ htab_delete (ar_hash);
+}
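The loop above is easiest to follow on a small example; schematically (region and call-site numbers purely illustrative):

   /* before (REG_EH_REGION notes)      after (region notes)

        call foo  ;REG_EH_REGION 2        NOTE_INSN_EH_REGION_BEG  [call site 1]
        call bar  ;REG_EH_REGION 2        call foo
        call baz  ;REG_EH_REGION 3        call bar
                                          NOTE_INSN_EH_REGION_END  [call site 1]
                                          NOTE_INSN_EH_REGION_BEG  [call site 2]
                                          call baz
                                          NOTE_INSN_EH_REGION_END  [call site 2]

      Adjacent insns whose action chain and landing pad agree are
      folded into a single region, and NOTE_EH_HANDLER now holds the
      call-site index returned by add_call_site rather than an EH
      region number.  */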
- if (last_ptr != NULL && (last_ptr->type_info == NULL
- || last_ptr->type_info == CATCH_ALL_TYPE))
- extra = 0;
+
+static void
+push_uleb128 (data_area, value)
+ varray_type *data_area;
+ unsigned int value;
+{
+ do
+ {
+ unsigned char byte = value & 0x7f;
+ value >>= 7;
+ if (value)
+ byte |= 0x80;
+ VARRAY_PUSH_UCHAR (*data_area, byte);
+ }
+ while (value);
+}
- info->num_handlers[index] = count + extra;
- info->handlers[index] = (handler_info **) xmalloc ((count + extra)
- * sizeof (handler_info **));
+static void
+push_sleb128 (data_area, value)
+ varray_type *data_area;
+ int value;
+{
+ unsigned char byte;
+ int more;
- /* First put all our handlers into the list. */
- ptr = get_first_handler (block);
- for (x = 0; x < count; x++)
+ do
{
- info->handlers[index][x] = ptr;
- ptr = ptr->next;
+ byte = value & 0x7f;
+ value >>= 7;
+ more = ! ((value == 0 && (byte & 0x40) == 0)
+ || (value == -1 && (byte & 0x40) != 0));
+ if (more)
+ byte |= 0x80;
+ VARRAY_PUSH_UCHAR (*data_area, byte);
}
+ while (more);
+}
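Both helpers emit standard DWARF LEB128: seven payload bits per byte, least-significant group first, with the high bit set on every byte except the last; push_sleb128 additionally relies on >> sign-extending negative values.  As a concrete check of the logic, using the classic DWARF example values:

   /* push_uleb128 (&area,  624485)  appends  0xe5 0x8e 0x26
      push_sleb128 (&area, -624485)  appends  0x9b 0xf1 0x59
      (624485 == 0x98765; the low seven bits 0x65 come out first,
      with 0x80 or'd in because more bytes follow.)  */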
-  /* Now add all the outer region handlers, if they aren't the same as
- one of the types in the current block. We won't worry about
- derived types yet, we'll just look for the exact type. */
- for (y =0, x = 0; x < extra ; x++)
+
+#ifndef HAVE_AS_LEB128
+static int
+dw2_size_of_call_site_table ()
+{
+ int n = cfun->eh->call_site_data_used;
+ int size = n * (4 + 4 + 4);
+ int i;
+
+ for (i = 0; i < n; ++i)
{
- int i, ok;
- ok = 1;
- /* Check to see if we have a type duplication. */
- for (i = 0; i < count; i++)
- if (info->handlers[index][i]->type_info == extra_handlers[x]->type_info)
- {
- ok = 0;
- /* Record one less handler. */
- (info->num_handlers[index])--;
- break;
- }
- if (ok)
- {
- info->handlers[index][y + count] = extra_handlers[x];
- y++;
- }
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ size += size_of_uleb128 (cs->action);
}
-}
-/* This function will allocate and initialize an eh_nesting_info structure.
- It returns a pointer to the completed data structure. If there are
- no exception regions, a NULL value is returned. */
+ return size;
+}
-eh_nesting_info *
-init_eh_nesting_info ()
+static int
+sjlj_size_of_call_site_table ()
{
- int *nested_eh_region;
- int region_count = 0;
- rtx eh_note = NULL_RTX;
- eh_nesting_info *info;
- rtx insn;
- int x;
+ int n = cfun->eh->call_site_data_used;
+ int size = 0;
+ int i;
- if (! flag_exceptions)
- return 0;
+ for (i = 0; i < n; ++i)
+ {
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ size += size_of_uleb128 (INTVAL (cs->landing_pad));
+ size += size_of_uleb128 (cs->action);
+ }
- info = (eh_nesting_info *) xmalloc (sizeof (eh_nesting_info));
- info->region_index = (int *) xcalloc ((max_label_num () + 1), sizeof (int));
- nested_eh_region = (int *) xcalloc (max_label_num () + 1, sizeof (int));
+ return size;
+}
+#endif
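Without .uleb128 support in the assembler, the call-site table size must be known before the LSDA header is written, hence the two sizing helpers above.  A worked example of the dw2 arithmetic, with an invented record count and action values:

   /* Three call sites with actions 0, 1 and 130:
        fixed part:  3 * (4 + 4 + 4)            = 36 bytes
        actions:     1 + 1 + 2  (uleb128 sizes) =  4 bytes
        call_site_len                           = 40 bytes  */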
+
+static void
+dw2_output_call_site_table ()
+{
+ const char *function_start_lab
+ = IDENTIFIER_POINTER (current_function_func_begin_label);
+ int n = cfun->eh->call_site_data_used;
+ int i;
- /* Create the nested_eh_region list. If indexed with a block number, it
- returns the block number of the next outermost region, if any.
- We can count the number of regions and initialize the region_index
- vector at the same time. */
- for (insn = get_insns(); insn; insn = NEXT_INSN (insn))
+ for (i = 0; i < n; ++i)
{
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
- {
- int block = NOTE_EH_HANDLER (insn);
- region_count++;
- info->region_index[block] = region_count;
- if (eh_note)
- nested_eh_region [block] =
- NOTE_EH_HANDLER (XEXP (eh_note, 0));
- else
- nested_eh_region [block] = 0;
- eh_note = gen_rtx_EXPR_LIST (VOIDmode, insn, eh_note);
- }
- else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
- eh_note = XEXP (eh_note, 1);
- }
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+ char reg_start_lab[32];
+ char reg_end_lab[32];
+ char landing_pad_lab[32];
+
+ ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
+ ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
+
+ if (cs->landing_pad)
+ ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
+ CODE_LABEL_NUMBER (cs->landing_pad));
+
+ /* ??? Perhaps use insn length scaling if the assembler supports
+ generic arithmetic. */
+ /* ??? Perhaps use attr_length to choose data1 or data2 instead of
+ data4 if the function is small enough. */
+#ifdef HAVE_AS_LEB128
+ dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
+ "region %d start", i);
+ dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
+ "length");
+ if (cs->landing_pad)
+ dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
+ "landing pad");
+ else
+ dw2_asm_output_data_uleb128 (0, "landing pad");
+#else
+ dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
+ "region %d start", i);
+ dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
+ if (cs->landing_pad)
+ dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
+ "landing pad");
+ else
+ dw2_asm_output_data (4, 0, "landing pad");
+#endif
+ dw2_asm_output_data_uleb128 (cs->action, "action");
}
-
- /* If there are no regions, wrap it up now. */
- if (region_count == 0)
+
+ call_site_base += n;
+}
+
+static void
+sjlj_output_call_site_table ()
+{
+ int n = cfun->eh->call_site_data_used;
+ int i;
+
+ for (i = 0; i < n; ++i)
{
- free (info->region_index);
- free (info);
- free (nested_eh_region);
- return NULL;
+ struct call_site_record *cs = &cfun->eh->call_site_data[i];
+
+ dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
+ "region %d landing pad", i);
+ dw2_asm_output_data_uleb128 (cs->action, "action");
}
- region_count++;
- info->handlers = (handler_info ***) xcalloc (region_count,
- sizeof (handler_info ***));
- info->num_handlers = (int *) xcalloc (region_count, sizeof (int));
- info->outer_index = (int *) xcalloc (region_count, sizeof (int));
+ call_site_base += n;
+}
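The two table flavours above differ only in how a call site is identified.  A sketch of one record in each format (field widths depend on HAVE_AS_LEB128):

   /* dw2 record (one per call site):
        region start   offset of LEHB<i> from the function begin label
        region length  offset of LEHE<i> from LEHB<i>
        landing pad    offset of the pad label from the function begin
                       label, or 0 if there is none
        action         uleb128; 0 means "no action"

      sjlj record:
        landing pad    emitted as a uleb128 constant rather than a
                       label difference
        action         uleb128; 0 means "no action"  */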
+
+void
+output_function_exception_table ()
+{
+ int tt_format, cs_format, lp_format, i, n;
+#ifdef HAVE_AS_LEB128
+ char ttype_label[32];
+ char cs_after_size_label[32];
+ char cs_end_label[32];
+#else
+ int call_site_len;
+#endif
+ int have_tt_data;
+ int funcdef_number;
+ int tt_format_size;
- /* Now initialize the handler lists for all exception blocks. */
- for (x = 0; x <= max_label_num (); x++)
+ /* Not all functions need anything. */
+ if (! cfun->uses_eh_lsda)
+ return;
+
+ funcdef_number = (USING_SJLJ_EXCEPTIONS
+ ? sjlj_funcdef_number
+ : current_funcdef_number);
+
+#ifdef IA64_UNWIND_INFO
+ fputs ("\t.personality\t", asm_out_file);
+ output_addr_const (asm_out_file, eh_personality_libfunc);
+ fputs ("\n\t.handlerdata\n", asm_out_file);
+ /* Note that varasm still thinks we're in the function's code section.
+ The ".endp" directive that will immediately follow will take us back. */
+#else
+ exception_section ();
+#endif
+
+ have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
+ || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
+
+ /* Indicate the format of the @TType entries. */
+ if (! have_tt_data)
+ tt_format = DW_EH_PE_omit;
+ else
{
- if (info->region_index[x] != 0)
- process_nestinfo (x, info, nested_eh_region);
+ tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+#ifdef HAVE_AS_LEB128
+ ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
+#endif
+ tt_format_size = size_of_encoded_value (tt_format);
+
+ assemble_eh_align (tt_format_size * BITS_PER_UNIT);
}
- info->region_count = region_count;
- /* Clean up. */
- free (nested_eh_region);
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
- return info;
-}
+ /* The LSDA header. */
+ /* Indicate the format of the landing pad start pointer. An omitted
+ field implies @LPStart == @Start. */
+ /* Currently we always put @LPStart == @Start. This field would
+ be most useful in moving the landing pads completely out of
+ line to another section, but it could also be used to minimize
+ the size of uleb128 landing pad offsets. */
+ lp_format = DW_EH_PE_omit;
+ dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
+ eh_data_format_name (lp_format));
-/* This function is used to retrieve the vector of handlers which
- can be reached by a given insn in a given exception region.
- BLOCK is the exception block the insn is in.
- INFO is the eh_nesting_info structure.
- INSN is the (optional) insn within the block. If insn is not NULL_RTX,
- it may contain reg notes which modify its throwing behavior, and
- these will be obeyed. If NULL_RTX is passed, then we simply return the
- handlers for block.
- HANDLERS is the address of a pointer to a vector of handler_info pointers.
- Upon return, this will have the handlers which can be reached by block.
- This function returns the number of elements in the handlers vector. */
+ /* @LPStart pointer would go here. */
-int
-reachable_handlers (block, info, insn, handlers)
- int block;
- eh_nesting_info *info;
- rtx insn ;
- handler_info ***handlers;
-{
- int index = 0;
- *handlers = NULL;
+ dw2_asm_output_data (1, tt_format, "@TType format (%s)",
+ eh_data_format_name (tt_format));
- if (info == NULL)
- return 0;
- if (block > 0)
- index = info->region_index[block];
-
- if (insn && GET_CODE (insn) == CALL_INSN)
- {
- /* RETHROWs specify a region number from which we are going to rethrow.
- This means we won't pass control to handlers in the specified
- region, but rather any region OUTSIDE the specified region.
- We accomplish this by setting block to the outer_index of the
- specified region. */
- rtx note = find_reg_note (insn, REG_EH_RETHROW, NULL_RTX);
- if (note)
+#ifndef HAVE_AS_LEB128
+ if (USING_SJLJ_EXCEPTIONS)
+ call_site_len = sjlj_size_of_call_site_table ();
+ else
+ call_site_len = dw2_size_of_call_site_table ();
+#endif
+
+ /* A pc-relative 4-byte displacement to the @TType data. */
+ if (have_tt_data)
+ {
+#ifdef HAVE_AS_LEB128
+ char ttype_after_disp_label[32];
+ ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
+ funcdef_number);
+ dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
+ "@TType base offset");
+ ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
+#else
+      /* Ugh.  Alignment complicates things.  */
+ unsigned int before_disp, after_disp, last_disp, disp;
+
+ before_disp = 1 + 1;
+ after_disp = (1 + size_of_uleb128 (call_site_len)
+ + call_site_len
+ + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
+ + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
+ * tt_format_size));
+
+ disp = after_disp;
+ do
{
- index = eh_region_from_symbol (XEXP (note, 0));
- index = info->region_index[index];
- if (index)
- index = info->outer_index[index];
- }
- else
- {
- /* If there is no rethrow, we look for a REG_EH_REGION, and
- we'll throw from that block. A value of 0 or less
- indicates that this insn cannot throw. */
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (note)
- {
- int b = INTVAL (XEXP (note, 0));
- if (b <= 0)
- index = 0;
- else
- index = info->region_index[b];
- }
+ unsigned int disp_size, pad;
+
+ last_disp = disp;
+ disp_size = size_of_uleb128 (disp);
+ pad = before_disp + disp_size + after_disp;
+ if (pad % tt_format_size)
+ pad = tt_format_size - (pad % tt_format_size);
+ else
+ pad = 0;
+ disp = after_disp + pad;
}
+ while (disp != last_disp);
+
+ dw2_asm_output_data_uleb128 (disp, "@TType base offset");
+#endif
}
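The do/while loop above is a small fixed-point iteration: the padding needed to align the type table depends on the encoded size of the displacement, which in turn depends on the padding.  A numeric sketch (after_disp of 126 bytes and tt_format_size of 4 are invented for illustration; before_disp is always the two format bytes already emitted):

   /* disp = 126: disp_size = 1, (2 + 1 + 126) % 4 = 1, pad = 3, disp -> 129
      disp = 129: disp_size = 2, (2 + 2 + 126) % 4 = 2, pad = 2, disp -> 128
      disp = 128: disp_size = 2, (2 + 2 + 126) % 4 = 2, pad = 2, disp -> 128
      The value repeats, so the loop exits with a displacement (128)
      whose own uleb128 size is consistent with the padding it implies,
      even though the first guess straddled the one-byte/two-byte
      uleb128 boundary.  */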
- /* If we reach this point, and index is 0, there is no throw. */
- if (index == 0)
- return 0;
-
- *handlers = info->handlers[index];
- return info->num_handlers[index];
-}
+ /* Indicate the format of the call-site offsets. */
+#ifdef HAVE_AS_LEB128
+ cs_format = DW_EH_PE_uleb128;
+#else
+ cs_format = DW_EH_PE_udata4;
+#endif
+ dw2_asm_output_data (1, cs_format, "call-site format (%s)",
+ eh_data_format_name (cs_format));
+
+#ifdef HAVE_AS_LEB128
+ ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
+ funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
+ funcdef_number);
+ dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
+ "Call-site table length");
+ ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_output_call_site_table ();
+ else
+ dw2_output_call_site_table ();
+ ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
+#else
+  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_output_call_site_table ();
+ else
+ dw2_output_call_site_table ();
+#endif
-/* This function will free all memory associated with the eh_nesting info. */
+ /* ??? Decode and interpret the data for flag_debug_asm. */
+ n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
+ for (i = 0; i < n; ++i)
+ dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
+ (i ? NULL : "Action record table"));
-void
-free_eh_nesting_info (info)
- eh_nesting_info *info;
-{
- int x;
- if (info != NULL)
+ if (have_tt_data)
+ assemble_eh_align (tt_format_size * BITS_PER_UNIT);
+
+ i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
+ while (i-- > 0)
{
- if (info->region_index)
- free (info->region_index);
- if (info->num_handlers)
- free (info->num_handlers);
- if (info->outer_index)
- free (info->outer_index);
- if (info->handlers)
- {
- for (x = 0; x < info->region_count; x++)
- if (info->handlers[x])
- free (info->handlers[x]);
- free (info->handlers);
- }
- free (info);
+ tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
+
+ if (type == NULL_TREE)
+ type = integer_zero_node;
+ else
+ type = lookup_type_for_runtime (type);
+
+ dw2_asm_output_encoded_addr_rtx (tt_format,
+ expand_expr (type, NULL_RTX, VOIDmode,
+ EXPAND_INITIALIZER),
+ NULL);
}
+
+#ifdef HAVE_AS_LEB128
+ if (have_tt_data)
+ ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
+#endif
+
+ /* ??? Decode and interpret the data for flag_debug_asm. */
+ n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
+ for (i = 0; i < n; ++i)
+ dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
+ (i ? NULL : "Exception specification table"));
+
+ function_section (current_function_decl);
+
+ if (USING_SJLJ_EXCEPTIONS)
+ sjlj_funcdef_number += 1;
}
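Taken together, output_function_exception_table lays the LSDA out as follows (a summary of the code above; exact field sizes depend on HAVE_AS_LEB128 and the encodings chosen):

   /* LLSDA<n>:
        1 byte    @LPStart encoding   (always DW_EH_PE_omit here)
        1 byte    @TType encoding     (DW_EH_PE_omit when there is no
                                       type or exception-spec data)
        uleb128   @TType base offset  (only when type data is present)
        1 byte    call-site encoding  (uleb128 or udata4)
        uleb128   call-site table length
                  call-site table     (dw2 or sjlj flavour)
                  action record table
                  padding to tt_format_size alignment
                  type table          (entries emitted in reverse order)
                  exception specification table  */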