#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"
#include "tree-pass.h"
#include "timevar.h"
#include "insn-attr.h"
#ifdef OPTIMIZE_MODE_SWITCHING
/* We want target machines to be able to use multiple modes for an
   entity (e.g. FP rounding mode, ARM Thumb state).  The insn stream is
   scanned and each insn that requires a specific mode gets a struct
   seginfo record; the records for one basic block form a singly linked
   list hanging off that block's bb_info.  */
struct seginfo
{
  int mode;			/* Mode required starting at this segment.  */
  rtx insn_ptr;			/* Insn at which the mode set is emitted
				   (before it, or after a bb-head note).  */
  int bbnum;			/* Index of the owning basic block.  */
  struct seginfo *next;		/* Next segment in the same block.  */
  HARD_REG_SET regs_live;	/* Hard regs live at the segment start;
				   handed to EMIT_MODE_SET.  */
};
/* Per-basic-block, per-entity record: the list of mode segments found
   in the block, and the mode the block leaves in effect (filled in by
   the forward scan in optimize_mode_switching).  */
struct bb_info
{
  struct seginfo *seginfo;	/* Head of this block's segment list.  */
  int computing;		/* Mode in effect at the block's end,
				   or no_mode if none was required.  */
};
/* Bitmap vectors feeding the LCM (lazy code motion) problem — one bit
   per entity, one sbitmap per basic block:
   antic  - block anticipates the mode on entry (first segment wants it);
   transp - block neither requires nor clobbers the mode ("transparent");
   comp   - block computes (exits with) the mode.  */
static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

/* Forward declarations.  */
static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET);
static void reg_becomes_live (rtx, rtx, void *);
static void make_preds_opaque (basic_block, int);
/* Allocate and return a fresh seginfo describing a segment that needs
   MODE starting at INSN in basic block BB, with REGS_LIVE the hard
   registers live at that point.  The NEXT link starts out NULL; the
   caller chains the record via add_seginfo.  */
static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *info = XNEW (struct seginfo);

  info->mode = mode;
  info->insn_ptr = insn;
  info->bbnum = bb;
  info->next = NULL;
  COPY_HARD_REG_SET (info->regs_live, regs_live);
  return info;
}
/* Append INFO at the tail of the segment list rooted at HEAD,
   preserving the order in which segments were discovered.  */
static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo **slot;

  /* Walk to the terminating NULL link, then store INFO there.  This
     handles the empty-list case without a special branch.  */
  for (slot = &head->seginfo; *slot != NULL; slot = &(*slot)->next)
    ;
  *slot = info;
}
/* Recursively mark the predecessors of basic block B as non-transparent
   for entity J.  Recursion stops at blocks that are already opaque, and
   at edges whose AUX is set — those edges received an LCM-inserted mode
   set, so propagation need not continue through them.  */
static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
	continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}
/* Record in LIVE that REG (a hard register, possibly multi-word) dies:
   clear every hard register it spans.  Non-REG rtxes and pseudo
   registers are ignored.

   NOTE(review): LIVE is taken by value.  If HARD_REG_SET is an array
   type this decays to a pointer and the caller's set is updated, but on
   targets where HARD_REG_SET is a scalar the clears only affect a local
   copy and are silently lost — confirm whether this should take a
   HARD_REG_SET * (the callers pass live_now expecting it to change).  */
static void
reg_dies (rtx reg, HARD_REG_SET live)
{
  int regno, nregs;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    /* Clear each word-register the value occupies.  */
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      CLEAR_HARD_REG_BIT (live, regno + nregs);
}
/* note_stores callback: REG is being set, so mark every hard register
   it spans as live in *LIVE (a HARD_REG_SET passed through the void *
   cookie).  SUBREGs are looked through; pseudos are ignored.  */
static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno, nregs;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs);
}
/* Entry and exit modes come as a pair: a target that defines one of
   MODE_ENTRY / MODE_EXIT must define the other too.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
#error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block so that mode-exit
   switching code has a block of its own to go in, and return that
   pre-exit block (or NULL if there is no fallthrough edge).  Care is
   taken not to separate a return value copy from the USE of the return
   register, since the mode switch must not be placed between them.  */
static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	regset live_at_end = src_bb->il.rtl->global_live_at_end;
	rtx last_insn, ret_reg;

	/* There should be at most one fallthrough edge to the exit.  */
	gcc_assert (!pre_exit);
	/* If the fallthrough block ends in a USE of the return
	   register, scan backwards for the insns that copy the return
	   value into it so that the split point lands before them.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    int short_block = 0;
	    int maybe_builtin_apply = 0;
	    int forced_late_switch = 0;
	    rtx before_return_copy;

	    /* NREGS counts return-register words not yet accounted
	       for by a copy insn; keep walking backwards until the
	       whole return value has been seen.  */
	    do
	      {
		rtx return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (INSN_P (return_copy))
		  {
		    /* A USE of a function-value register here is
		       presumably from __builtin_apply-style code;
		       note it and keep scanning.  */
		    if (GET_CODE (PATTERN (return_copy)) == USE
			&& GET_CODE (XEXP (PATTERN (return_copy), 0)) == REG
			&& (FUNCTION_VALUE_REGNO_P
			    (REGNO (XEXP (PATTERN (return_copy), 0)))))
		      {
			maybe_builtin_apply = 1;
			last_insn = return_copy;
			continue;
		      }
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			/* Anything other than a single set or a
			   CLOBBER ends the return-copy sequence.  */
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      break;
		    if (copy_start >= FIRST_PSEUDO_REGISTER)
		      break;
		    copy_num
		      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If this insn itself needs a non-default,
		       non-exit mode for some entity, a mode switch
		       after the copy is unavoidable.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode = MODE_NEEDED (e, return_copy);

			if (mode != num_modes[e] && mode != MODE_EXIT (e))
			  break;
		      }
		    if (j >= 0)
		      {
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = 1;
			break;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      /* This copy fills part of the return value.  */
		      nregs -= copy_num;
		    else if (!maybe_builtin_apply
			     || !FUNCTION_VALUE_REGNO_P (copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* Reaching the head of the block before the full
		   return value is accounted for means the copies live
		   in a predecessor block; give up moving them.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = 1;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If registers remain unaccounted for, check that one of
	       the benign explanations holds; otherwise something is
	       wrong with the scan above.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(CLASS_LIKELY_SPILLED_P
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (INSN_P (last_insn))
	      {
		/* Drop a note before the first return-copy insn so we
		   have a stable split point, then split off anything
		   that precedes the copies.  */
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		if (last_insn != BB_HEAD (src_bb))
		  src_bb = split_block (src_bb,
					PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    /* No return-value USE at the end: splitting the edge
	       itself gives us the pre-exit block; propagate liveness
	       into it.  */
	    pre_exit = split_edge (eg);
	    COPY_REG_SET (pre_exit->il.rtl->global_live_at_start, live_at_end);
	    COPY_REG_SET (pre_exit->il.rtl->global_live_at_end, live_at_end);
	  }
      }

  return pre_exit;
}
#endif
/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches using an LCM (lazy code motion) framework.
   Returns 1 if any work was done (liveness info was then refreshed),
   0 otherwise.  */
static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emited = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  clear_bb_flags ();

  /* Collect the entities the target wants optimized and allocate the
     per-block info arrays (with room for the extra blocks that the
     entry/exit splitting below can create).  */
  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
	entry_exit_extra = 3;
#endif
	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the entry edge and the exit fallthrough so mode-entry and
     mode-exit switching code have blocks of their own.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  /* LCM bitmaps; transparency defaults to "everywhere" and is cleared
     wherever a segment requiring a mode is found.  */
  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);
  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Forward scan: determine the first mode each block needs (its
	 anticipatable mode) and the mode it computes on exit, tracking
	 hard-register liveness as we go.  */
      FOR_EACH_BB (bb)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  HARD_REG_SET live_now;

	  REG_SET_TO_HARD_REG_SET (live_now,
				   bb->il.rtl->global_live_at_start);

	  /* Treat the mode as clobbered across complex (abnormal)
	     incoming edges: force a no_mode segment at the block head
	     and make the block opaque.  */
	  {
	    edge_iterator ei;
	    edge e;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (e->flags & EDGE_COMPLEX)
		break;
	    if (e)
	      {
		ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		RESET_BIT (transp[bb->index], j);
	      }
	  }

	  for (insn = BB_HEAD (bb);
	       insn != NULL && insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  int mode = MODE_NEEDED (e, insn);
		  rtx link;

		  /* Start a new segment whenever the required mode
		     changes.  */
		  if (mode != no_mode && mode != last_mode)
		    {
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      RESET_BIT (transp[bb->index], j);
		    }
#ifdef MODE_AFTER
		  last_mode = MODE_AFTER (last_mode, insn);
#endif
		  /* Update LIVE_NOW from the insn's death notes,
		     stores, and unused notes.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* A block with no mode requirement at all still gets a
	     no_mode segment so its seginfo list is never empty.  */
	  if (last_mode == no_mode)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	    }
	}
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
	int mode = MODE_ENTRY (e);

	if (mode != no_mode)
	  {
	    bb = post_entry;

	    /* By always making this nontransparent, we save an extra
	       check in make_preds_opaque, and avoid confusing
	       pre_edge_lcm when antic is cleared but transp and comp
	       are set.  */
	    RESET_BIT (transp[bb->index], j);

	    /* The post-entry block "computes" the entry mode; the
	       pre-exit block's segment requires the exit mode.  */
	    info[bb->index].computing = mode;

	    if (pre_exit)
	      info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
	  }
      }
#endif
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  /* Solve one LCM problem per mode priority level.  */
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *delete;
      sbitmap *insert;

      /* Set the anticipatable and computed bits for the mode of this
	 priority for each entity.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_EACH_BB (bb)
	    {
	      if (info[bb->index].seginfo->mode == m)
		SET_BIT (antic[bb->index], j);

	      if (info[bb->index].computing == m)
		SET_BIT (comp[bb->index], j);
	    }
	}

      /* KILL is the complement of transparency; then let LCM compute
	 the optimal edge insertions and block deletions.  */
      FOR_EACH_BB (bb)
	sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
				kill, &insert, &delete);

      for (j = n_entities - 1; j >= 0; j--)
	{
	  int no_mode = num_modes[entity_map[j]];

	  /* Insert all mode sets that have been inserted by lcm,
	     marking each chosen edge's AUX so make_preds_opaque stops
	     there.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! TEST_BIT (insert[e], j))
		continue;

	      eg->aux = (void *)1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge,
				       src_bb->il.rtl->global_live_at_end);

	      /* Let the target emit the actual mode-setting insns.  */
	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
	      mode_set = get_insns ();
	      end_sequence ();

	      /* Do not bother inserting an empty sequence.  */
	      if (mode_set == NULL_RTX)
		continue;

	      /* We should not get an abnormal edge here.  */
	      gcc_assert (! (eg->flags & EDGE_ABNORMAL));

	      need_commit = 1;
	      insert_insn_on_edge (mode_set, eg);
	    }

	  /* Cancel mode sets that LCM proved redundant, propagating
	     opacity to their predecessors.  */
	  FOR_EACH_BB_REVERSE (bb)
	    if (TEST_BIT (delete[bb->index], j))
	      {
		make_preds_opaque (bb, j);
		bb_info[j][bb->index].seginfo->mode = no_mode;
	      }
	}

      sbitmap_vector_free (delete);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining (non-cancelled) mode sets in all the
     segments, freeing the segment records as we go.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  start_sequence ();
		  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Do not bother inserting an empty sequence.  */
		  if (mode_set != NULL_RTX)
		    {
		      emited = true;
		      /* Emit after a basic-block note (the segment
			 starts at the block head), otherwise before
			 the segment's first insn.  */
		      if (NOTE_P (ptr->insn_ptr)
			  && (NOTE_LINE_NUMBER (ptr->insn_ptr)
			      == NOTE_INSN_BASIC_BLOCK))
			emit_insn_after (mode_set, ptr->insn_ptr);
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emited)
    return 0;
#endif

  /* New insns were emitted; bring liveness info back up to date.  */
  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
				     | PROP_SCAN_DEAD_CODE));

  return 1;
}
#endif
/* Gate for the mode-switching pass: run it exactly when the target
   defines OPTIMIZE_MODE_SWITCHING.  */
static bool
gate_mode_switching (void)
{
#ifndef OPTIMIZE_MODE_SWITCHING
  /* The target declares no mode-switched entities; nothing to do.  */
  return false;
#else
  return true;
#endif
}
/* Pass entry point.  Runs the optimization with pseudo creation
   temporarily re-enabled (the emitted mode-set sequences may create
   new pseudos), then restores the flag.  Always returns 0.  */
static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  no_new_pseudos = 0;
  /* The result is only meaningful internally; discard it.  */
  (void) optimize_mode_switching ();
  no_new_pseudos = 1;
#endif
  return 0;
}
/* Pass descriptor for the mode-switching optimization.  */
struct tree_opt_pass pass_mode_switching =
{
  "mode-sw",				/* name */
  gate_mode_switching,			/* gate */
  rest_of_handle_mode_switching,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_MODE_SWITCH,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};