#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Called by language front ends to request that TRY_FINALLY cleanups
   be implemented via the exception-handling machinery.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Hash-table equality callback for entries whose first field is a
   pointer: two entries are equal when those leading pointers match.  */
static int
struct_ptr_eq (const void *a, const void *b)
{
  const void *const *pa = a;
  const void *const *pb = b;
  return *pa == *pb;
}
/* Hash-table hash callback companion to struct_ptr_eq.  Hashes the
   leading pointer of the entry; the low bits are shifted away since
   heap objects tend to share alignment there.  */
static hashval_t
struct_ptr_hash (const void *a)
{
  const void *const *entry = a;
  return (size_t) *entry >> 4;
}
/* Hash-table entry recording which EH region number a statement
   belongs to.  GC-allocated (GTY) because it points at trees.  */
struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

/* Map from statement to EH region number; persists past this pass
   (queried by lookup_stmt_eh_region and friends below).  */
static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
/* Record that statement T is within EH region REGION.  Does nothing
   when REGION is null.  Delegates to add_stmt_to_eh_region (declared
   in tree-flow.h) rather than duplicating its table-insertion logic.  */
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}
/* Record in throw_stmt_table that statement T belongs to EH region
   number NUM.  NUM must be non-negative, and T must not already have
   an entry.  */
void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *node;
  void **slot;

  gcc_assert (num >= 0);

  node = ggc_alloc (sizeof (*node));
  node->stmt = t;
  node->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, node, INSERT);
  gcc_assert (!*slot);
  *slot = node;
}
/* Remove statement T's entry from throw_stmt_table, if any.  Returns
   true when an entry was found and removed, false otherwise (including
   when the table has not been created).  */
bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node key;
  void **slot;

  if (!throw_stmt_table)
    return false;

  key.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &key, NO_INSERT);
  if (!slot)
    return false;

  htab_clear_slot (throw_stmt_table, slot);
  return true;
}
/* Return the EH region number recorded for statement T, -1 when T has
   no entry, or -2 when throw_stmt_table does not exist at all.  */
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node key, *found;

  if (!throw_stmt_table)
    return -2;

  key.stmt = t;
  found = htab_find (throw_stmt_table, &key);

  if (found)
    return found->region_nr;
  return -1;
}
/* Entry mapping a label or TRY_FINALLY_EXPR (CHILD) to the innermost
   TRY_FINALLY_EXPR enclosing it (PARENT), or NULL at top level.  */
struct finally_tree_node
{
  tree child, parent;
};

/* Table of the above, keyed by CHILD.  Heap-allocated (xmalloc/free),
   created and destroyed within lower_eh_constructs.  */
static htab_t finally_tree;
/* Enter the CHILD -> PARENT relationship into finally_tree.  CHILD
   must not already be present.  */
static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *node;
  void **slot;

  node = xmalloc (sizeof (*node));
  node->child = child;
  node->parent = parent;

  slot = htab_find_slot (finally_tree, node, INSERT);
  gcc_assert (!*slot);
  *slot = node;
}
/* Walk statement tree T, recording in finally_tree for every label and
   every TRY_FINALLY_EXPR the innermost enclosing TRY_FINALLY_EXPR
   (REGION; NULL at top level).  Tail positions are iterated via goto
   to bound recursion depth.  */
static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      /* The body is enclosed by T itself; the cleanup stays in REGION.  */
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some other node we need not walk into.  */
      break;
    }
}
/* Walk up the finally_tree parent chain from START.  Return true if we
   escape the tree (no entry found) before reaching TARGET, i.e. START
   lies outside the TRY_FINALLY_EXPR TARGET; false if TARGET encloses
   START.  */
static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node key, *found;

  for (;;)
    {
      key.child = start;
      found = htab_find (finally_tree, &key);
      if (!found)
	return true;
      start = found->parent;
      if (start == target)
	return false;
    }
}
/* State of the lowering walk.  */
struct leh_state
{
  /* EH region currently being constructed, and the nearest enclosing
     TRY region (used when creating cleanup regions).  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Extra state needed while inside a TRY_FINALLY_EXPR; kept in a
     separate structure so copying leh_state stays cheap.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* The TRY_FINALLY_EXPR being processed, and a pointer to the slot
     holding it so it can be replaced in place.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The EH region created for this try_finally (NULL if cleanups
     aren't using EH).  */
  struct eh_region *region;

  /* Goto and return statements seen inside the try block that exit it,
     queued for redirection through the finally block.  INDEX is the
     position in DEST_ARRAY of the goto destination, or -1 for a
     return.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Distinct goto destinations escaping this try_finally.  */
  varray_type dest_array;

  /* Label to jump to in order to fall out of the construct; created
     lazily by lower_try_finally_fallthru_label.  */
  tree fallthru_label;

  /* Label the EH runtime transfers to for this region.  */
  tree eh_label;

  /* How control may leave the try block: off the end, via a return,
     or via an exception.  */
  bool may_fallthru;
  bool may_return;
  bool may_throw;
};
/* Forward declarations for mutually recursive lowering routines.  */
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* qsort/bsearch comparison on goto_queue entries, ordering by the
   address of the source statement.  (Pointer order is arbitrary but
   stable within a run, which is all bsearch needs.)  */
static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}
/* Look up the replacement statement list recorded for STMT in TF's
   goto queue (which must already be sorted by goto_queue_cmp), or
   NULL if STMT was not queued.  */
static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
		 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
/* A subroutine of replace_goto_queue_1.  *TP is a COND_EXPR arm (a
   GOTO_EXPR).  If it has a queued replacement, substitute it: either
   directly, when the replacement is a lone goto, or by jumping to a
   fresh label emitted (with the replacement) after the COND_EXPR via
   TSI.  */
static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  /* A single-goto replacement can live in the COND_EXPR arm itself.  */
  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  /* Otherwise jump to a new label placed after this statement, and put
     the replacement sequence there.  */
  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
/* Forward declaration; mutually recursive with replace_goto_queue_1.  */
static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

/* Process one statement T at iterator TSI, substituting any queued
   goto/return replacements from TF, and recursing into compound
   statements.  Advances TSI past the statement (or its replacement).  */
static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  /* Splice the replacement in and remove the original; the
	     iterator then already points at the next statement.  */
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;

    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;

    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      /* Statement lists are iterated by our caller
	 (replace_goto_queue_stmt_list), never seen as elements.  */
      gcc_unreachable ();

    default:
      break;
    }

  tsi_next (tsi);
}
/* Apply replace_goto_queue_1 to every statement of list T.  The
   iterator is advanced inside replace_goto_queue_1.  */
static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}
/* Replace every queued goto/return in the region rooted at TF->top_p
   with its recorded replacement sequence.  No-op when nothing was
   queued.  */
static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active != 0)
    replace_goto_queue_stmt_list (*tf->top_p, tf);
}
/* If STMT (a GOTO_EXPR or RETURN_EXPR) would leave the current
   TRY_FINALLY construct, record it in STATE->tf's goto queue so it can
   later be redirected through the finally block.  Gotos get an INDEX
   into tf->dest_array identifying their destination; returns use -1.  */
static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  /* Nothing to do outside a TRY_FINALLY.  */
  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	/* Find or add LAB's slot in the destination array.  */
	if (! tf->dest_array)
	  {
	    VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
	    VARRAY_PUSH_TREE (tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VARRAY_TREE (tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VARRAY_PUSH_TREE (tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  /* Append to the queue, growing the array geometrically as needed.  */
  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
#ifdef ENABLE_CHECKING
/* Checking-only sanity: no case label of SWITCH_EXPR may escape the
   current TRY_FINALLY construct, since switch labels, unlike gotos,
   cannot be redirected through the finally block.  */
static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect the queued RETURN_EXPR Q to jump to the finally-block label
   FINLAB, optionally prefixed by statement MOD.  The return value must
   be computed *before* the finally block runs, so it is saved into a
   shared temporary; *RETURN_VALUE_P holds that temporary (or the
   RESULT_DECL), shared by all redirected returns in this function.
   Q->cont_stmt receives the RETURN_EXPR to execute after the finally
   block; Q->repl_stmt receives the replacement for the original
   statement.  */
static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
		       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      switch (TREE_CODE (ret_expr))
	{
	case RESULT_DECL:
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt;
	  break;

	case MODIFY_EXPR:
	  {
	    tree result = TREE_OPERAND (ret_expr, 0);
	    tree new, old = TREE_OPERAND (ret_expr, 1);

	    if (!*return_value_p)
	      {
		if (aggregate_value_p (TREE_TYPE (result),
				       TREE_TYPE (current_function_decl)))
		  /* Aggregates returned in memory can be stored into
		     the result slot directly.  */
		  new = result;
		else
		  new = create_tmp_var (TREE_TYPE (old), "rettmp");
		*return_value_p = new;
	      }
	    else
	      new = *return_value_p;

	    /* Save the computed value before entering the finally
	       block...  */
	    x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
	    append_to_statement_list (x, &q->repl_stmt);

	    /* ... and return it afterward.  */
	    if (new == result)
	      x = result;
	    else
	      x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
	    q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
	  }
	  /* This break was missing: control fell through into
	     gcc_unreachable, aborting on every value-returning return
	     redirected through a finally block.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A return with no value: every such return is interchangeable.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}
/* Redirect the queued GOTO_EXPR Q to jump to the finally-block label
   FINLAB, optionally prefixed by statement MOD.  The original goto
   becomes Q->cont_stmt, to run after the finally block.  */
static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree jump;

  q->cont_stmt = q->stmt;

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  jump = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (jump, &q->repl_stmt);
}
/* Flatten the two-operand construct at *TP (operand 0 = body, operand
   1 = handler) into a straight-line sequence: body, goto OVER (if the
   body can fall through), LAB (if given), handler, OVER label.  OVER
   is created on demand.  */
static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  /* Jump around the handler when the body falls off its end.  */
  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
/* Return a duplicate of statement list T suitable for inserting into
   the region of OUTER_STATE, with its labels and nested try_finally
   nodes re-registered in finally_tree under the outer construct.  */
static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region;

  t = unsave_expr_now (t);

  region = outer_state->tf ? outer_state->tf->try_finally_expr : NULL;
  collect_finally_tree (t, region);

  return t;
}
/* Return the fallthru label of TF, creating it on first use.  A newly
   created label is registered in finally_tree under any enclosing
   try_finally construct.  */
static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;

  if (label)
    return label;

  label = create_artificial_label ();
  tf->fallthru_label = label;
  if (tf->outer->tf)
    record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
  return label;
}
/* A subroutine of lower_try_finally.  Emit the EH-path copy of the
   finally block: if the language supplies protect-cleanup actions,
   wrap the finally block in a MUST_NOT_THROW filter; if the finally
   block can fall through on the EH path, save and restore the
   exception pointer and filter around it and rethrow afterward.  The
   result is placed after THIS's eh_label at the end of the try body.
   THIS_STATE may be NULL (see lower_cleanup), in which case the
   finally block is consumed rather than duplicated.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups, which already have an outer state.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  if (finally_may_fallthru)
    {
      /* Save the active exception before the finally block and restore
	 it afterward, then resume the exception (RESX) since the EH
	 path cannot simply fall through.  */
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action, if any.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around the
     new EH-only code.  */
  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* The EH path is now handled; callers need not emit it again.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  The finally block cannot fall
   through, so control never continues past it: emit the finally block
   exactly once at the end, with a single label LAB as the target for
   every queued goto/return and (when applicable) the EH edge.  */
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  /* Reuse the eh_label as the entry to the finally block when the
     region may throw; otherwise make a fresh label.  */
  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  /* Redirect every queued exit to LAB.  */
  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);
  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
/* A subroutine of lower_try_finally.  Exactly one way out of the
   construct is live (fallthru, or a single goto/return target, or the
   EH edge): emit the finally block once, followed by the single
   continuation.  */
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge: run the finally block
	 behind the eh_label and rethrow.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable by falling off the try block.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  /* Only reachable via the goto queue (single goto or return).  */
  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* The destination is the fallthru label: dropping off the
	     end of the finally block already goes there, so no
	     continuation statement is needed.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Emit the single continuation after the finally block.  */
  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
/* A subroutine of lower_try_finally.  Multiple edges enter and leave
   the finally block: implement it by emitting one copy of the finally
   block per destination (fallthru, EH, and each distinct goto/return
   target), avoiding the dispatch switch at the cost of code size.  */
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      /* Fallthru copy: run the finally block, then jump past the rest.  */
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      /* EH copy: finally block behind the eh_label, then rethrow.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      /* One copy per distinct goto destination, plus one shared by all
	 returns (slot RETURN_INDEX).  The first queue entry seen for
	 each destination emits the copy; later entries are merely
	 redirected to its label.  */
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      if (tf->dest_array)
	return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
	return_index = 0;
      labels = xcalloc (sizeof (*labels), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;
	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}
	
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
/* A subroutine of lower_try_finally.  Multiple edges enter and leave
   the finally block: emit the finally block once and dispatch to the
   correct continuation via a switch on a temporary set by each
   incoming edge.  Case indices are laid out as: goto destinations
   [0..nlabels), then return, then EH, then fallthru.  */
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
		       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      /* Fallthru edge: set the selector and (if an EH label follows)
	 jump over it to the finally block.  */
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, fallthru_index), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      /* EH edge: selector is set behind the eh_label; its switch case
	 rethrows.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, eh_index), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge: set the selector, jump to the
     finally block, and fill in the corresponding case label.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build (CASE_LABEL_EXPR, void_type_node,
		   build_int_cst (NULL_TREE, switch_id), NULL,
		   /* The continuation statement is stashed in the
		      CASE_LABEL slot, to be recovered in the loop
		      below; a real label replaces it there.  */
		   q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      /* Recover the stashed continuation and give the case a label.  */
      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is
     required.  Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* The switch statement is linked only after replace_goto_queue runs,
     so its statements are not processed twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
/* Decide whether lower_try_finally should duplicate the finally block
   NDESTS times (return true) or emit it once behind a dispatch switch
   (return false).  Cost thresholds are heuristic.  */
static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int copy_cost, switch_cost;

  if (!optimize)
    return false;

  /* Copying emits the finally block (plus a goto) once per
     destination; the switch costs ~10 plus an assignment and goto per
     destination.  */
  copy_cost = (estimate_num_insns (finally) + 1) * ndests;
  switch_cost = 10 + 2 * ndests;

  if (optimize_size)
    return copy_cost < switch_cost;

  if (optimize > 1)
    return copy_cost < 100 || copy_cost < switch_cost * 2;

  return copy_cost < 40 || copy_cost * 2 < switch_cost * 3;
}
/* Lower a TRY_FINALLY_EXPR at *TP: collect the ways control can leave
   the try block (fallthru, goto, return, exception), then dispatch to
   one of the specialized emitters depending on how many distinct
   destinations there are and whether copying the finally block is
   cheaper than a dispatch switch.  */
static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */
  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later
     (find_goto_replacement uses bsearch).  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */
  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);
  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
/* Lower a TRY_CATCH_EXPR at *TP whose handler is a list of CATCH_EXPR
   nodes: create a TRY region with one catch region per handler, give
   each handler an eh_label, flatten the handlers after the body, and
   branch around them on the normal path.  */
static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If the body cannot throw, the handlers are dead: keep only the body.  */
  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      /* A handler that can complete jumps past the remaining handlers.  */
      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      /* Replace the CATCH_EXPR with its (now labeled) body.  */
      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
/* Lower a TRY_CATCH_EXPR at *TP whose handler is an EH_FILTER_EXPR:
   create an allowed-exceptions (or must-not-throw) region around the
   body and flatten the failure action behind an eh_label.  */
static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If the body cannot throw, the filter is dead: keep only the body.  */
  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* The failure action is lowered in the *outer* state.  */
  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
/* Lower a TRY_CATCH_EXPR at *TP whose handler is a plain cleanup
   sequence (neither CATCH_EXPR nor EH_FILTER_EXPR): create a cleanup
   region and reuse the try_finally machinery, via a fake leh_tf_state,
   to emit the EH-path copy of the cleanup.  */
static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If the body cannot throw, the cleanup is dead: keep only the body.  */
  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* honor_protect_cleanup_actions did nothing: flatten the cleanup
	 behind the eh_label ourselves.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* The EH path was emitted by honor_protect_cleanup_actions; keep
	 the body plus the fallthru label it may have requested.  */
      *tp = TREE_OPERAND (*tp, 0);

      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
/* Main recursive walker of the EH-lowering pass.  Lower the EH
   constructs in the statement (tree) at *TP under STATE, recording
   throwing statements in throw_stmt_table, queuing escaping gotos and
   returns, and rewriting TRY_FINALLY_EXPR/TRY_CATCH_EXPR in place.  */
static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  tree op;

	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);

	  /* A call on the RHS gets its own record as well.  */
	  op = get_call_expr_in (t);
	  if (op)
	    record_stmt_eh_region (state->cur_region, op);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;

    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      /* Dispatch on the kind of handler.  */
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      /* Flatten a statement list produced by lowering into the
		 enclosing list.  */
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some other statement we needn't touch.  */
      break;
    }
}
/* Entry point of the EH-lowering pass.  Builds the (temporary)
   finally_tree and the (persistent, GC'd) throw_stmt_table, lowers the
   whole function body, then collects the EH region array.  */
static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
				      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}
/* Pass descriptor for the EH lowering pass.  */
struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  PROP_gimple_lcf,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
/* Callback for foreach_reachable_handler: add an abnormal EH edge from
   the block of the statement passed in DATA to the block of REGION's
   handler label.  */
static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt = data;
  tree lab = get_eh_region_tree_label (region);
  basic_block src = bb_for_stmt (stmt);
  basic_block dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
/* Add EH edges from STMT to every handler reachable from it.  A
   RESX_EXPR carries its region number as operand 0; other statements
   are looked up in throw_stmt_table (no entry means no edges).  */
void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
/* Return true if expression EXPR could trap: dereference invalid
   memory, divide by zero, raise a floating-point exception, or
   overflow under -ftrapv.  */
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, idx;

  /* For arithmetic and comparisons, the operand type decides whether
     NaNs, signaling NaNs, or trapping signed overflow matter.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      /* These never trap themselves; look at the underlying object.  */
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Conservative: only NOTRAP-marked range refs avoid the trap
	 assumption; then the base object decides.  */
      if (!TREE_THIS_NOTRAP (expr))
	return true;
      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      idx = TREE_OPERAND (expr, 1);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation && flag_trapping_math)
	return true;
      /* Division by a non-constant or zero divisor may trap.  */
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
/* Return true if statement T could throw an exception: it contains a
   call not marked nothrow, or (with -fnon-call-exceptions) an
   expression that could trap.  Always false when exceptions are off.  */
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      /* The store itself may trap under -fnon-call-exceptions.  */
      if (flag_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
/* Return true if STMT can throw an exception that is caught within
   this function (per its recorded EH region).  */
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);

  return region_nr >= 0 && can_throw_internal_1 (region_nr);
}
/* Return true if STMT can throw an exception that propagates out of
   this function (per its recorded EH region).  */
bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);

  return region_nr >= 0 && can_throw_external_1 (region_nr);
}
/* If STMT can no longer throw, drop its EH region record.  Return true
   when a record was actually removed.  */
bool
maybe_clean_eh_stmt (tree stmt)
{
  if (tree_could_throw_p (stmt))
    return false;
  return remove_stmt_from_eh_region (stmt);
}
#include "gt-tree-eh.h"