#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "cfglayout.h"
#include "params.h"
#include "sched-int.h"
#include "target.h"
/* Number of insns in the region to be scheduled (set by init_ready_list).  */
static int target_n_insns;
/* Number of insns issued so far; scheduling stops when this reaches
   target_n_insns (see schedule_more_p).  */
static int sched_n_insns;
/* Forward declarations of the callbacks installed in ebb_sched_info and
   of the local helpers below.  */
static void init_ready_list (struct ready_list *);
static int can_schedule_ready_p (rtx);
static int new_ready (rtx);
static int schedule_more_p (void);
static const char *ebb_print_insn (rtx, int);
static int rank (rtx, rtx);
static int contributes_to_priority (rtx, rtx);
static void compute_jump_reg_dependencies (rtx, regset, regset, regset);
/* NOTE: "similiar" is a historical misspelling kept for consistency with
   the definition below.  */
static basic_block earliest_block_with_similiar_load (basic_block, rtx);
static void add_deps_for_risky_insns (rtx, rtx);
static basic_block schedule_ebb (rtx, rtx);
static basic_block fix_basic_block_boundaries (basic_block, basic_block, rtx,
rtx);
static void add_missing_bbs (rtx, basic_block, basic_block);
/* Return nonzero if there are still insns left to schedule in the
   current region, i.e. fewer insns have been issued than the region
   contains.  */
static int
schedule_more_p (void)
{
  return target_n_insns > sched_n_insns;
}
/* Add all insns that are initially ready to the ready list READY.
   Scans every insn in the region delimited by
   current_sched_info->prev_head / next_tail (both exclusive), adding
   those with no remaining incoming dependences, and counts the total
   number of insns into target_n_insns.  Also resets sched_n_insns so
   schedule_more_p starts from zero.  */
static void
init_ready_list (struct ready_list *ready)
{
rtx prev_head = current_sched_info->prev_head;
rtx next_tail = current_sched_info->next_tail;
rtx insn;
target_n_insns = 0;
sched_n_insns = 0;
#if 0
/* Debugging aid, normally compiled out.  */
if (sched_verbose >= 5)
debug_dependencies ();
#endif
for (insn = NEXT_INSN (prev_head); insn != next_tail; insn = NEXT_INSN (insn))
{
/* An insn with no pending dependences can be issued immediately.  */
if (INSN_DEP_COUNT (insn) == 0)
ready_add (ready, insn);
target_n_insns++;
}
}
/* Called when INSN is about to be moved to the ready list.  In an
   extended basic block every ready insn may be issued, so always
   return nonzero; count the insn so schedule_more_p can terminate.  */
static int
can_schedule_ready_p (rtx insn ATTRIBUTE_UNUSED)
{
  sched_n_insns += 1;
  return 1;
}
/* Called when insn NEXT becomes ready.  In EBB scheduling there is no
   reason to defer a ready insn, so unconditionally allow it.  */
static int
new_ready (rtx next ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Return a static string identifying INSN for scheduler dumps: the
   insn's UID formatted in a four-character field.  ALIGNED is unused.
   NOTE: returns a pointer to a static buffer, overwritten on each
   call.  */
static const char *
ebb_print_insn (rtx insn, int aligned ATTRIBUTE_UNUSED)
{
  static char buffer[80];

  sprintf (buffer, "%4d", INSN_UID (insn));
  return buffer;
}
/* Compare priority of two insns by the profile weight of their basic
   blocks.  Return -1 when INSN1's block is hotter (higher count or
   frequency), 1 when it is colder, 0 otherwise.  NOTE(review): the
   count and frequency tests are OR'ed, so the ordering is not a strict
   weak order when count and frequency disagree — kept as-is.  */
static int
rank (rtx insn1, rtx insn2)
{
  basic_block block1 = BLOCK_FOR_INSN (insn1);
  basic_block block2 = BLOCK_FOR_INSN (insn2);

  if (block1->count > block2->count || block1->frequency > block2->frequency)
    return -1;

  if (block1->count < block2->count || block1->frequency < block2->frequency)
    return 1;

  return 0;
}
/* Return nonzero if the dependence NEXT -> INSN should contribute to
   INSN's priority.  For EBB scheduling every dependence counts, so
   both arguments are ignored.  */
static int
contributes_to_priority (rtx next ATTRIBUTE_UNUSED,
                         rtx insn ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Compute register dependences of jump INSN from its outgoing edges.
   For the fall-through edge, SET receives the registers conditionally
   set by INSN restricted to those live at the fall-through successor
   (intersection of COND_SET with the successor's live-at-start set).
   For every other edge, the successor's live-at-start registers are
   accumulated into USED.  */
static void
compute_jump_reg_dependencies (rtx insn, regset cond_set, regset used,
regset set)
{
basic_block b = BLOCK_FOR_INSN (insn);
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, b->succs)
if (e->flags & EDGE_FALLTHRU)
/* Registers set conditionally by the jump matter only if they are
   live at the fall-through destination.  */
bitmap_and (set, e->dest->global_live_at_start, cond_set);
else
bitmap_ior_into (used, e->dest->global_live_at_start);
}
/* Callback table handing EBB-specific behavior to the Haifa scheduler
   (installed as current_sched_info in schedule_ebbs).  Field order
   follows struct sched_info in sched-int.h.  */
static struct sched_info ebb_sched_info =
{
init_ready_list,
can_schedule_ready_p,
schedule_more_p,
new_ready,
rank,
ebb_print_insn,
contributes_to_priority,
compute_jump_reg_dependencies,
/* prev_head / next_tail region bounds — filled in by schedule_ebb.  */
NULL, NULL,
/* head / tail — filled in by the scheduler.  */
NULL, NULL,
/* 0: queue_must_finish_empty (set to 1 at runtime in schedule_ebb);
   the remaining two initializers (1, 0) correspond to the trailing
   sched_info fields — presumably use_cselib and flags; confirm
   against sched-int.h.  */
0, 1, 0
};
/* Emit placeholder basic-block notes before insn BEFORE for each block
   from LAST back to FIRST (walking prev_bb links).  Blocks that ended
   up with no insns after scheduling are given a single
   NOTE_INSN_BASIC_BLOCK serving as both head and tail, keeping the
   bb <-> insn mapping consistent.  */
static void
add_missing_bbs (rtx before, basic_block first, basic_block last)
{
for (; last != first->prev_bb; last = last->prev_bb)
{
before = emit_note_before (NOTE_INSN_BASIC_BLOCK, before);
NOTE_BASIC_BLOCK (before) = last;
/* The note is the only insn of the (now empty) block.  */
BB_HEAD (last) = before;
BB_END (last) = before;
update_bb_for_insn (last);
}
}
/* Rebuild basic-block boundaries after scheduling moved insns across
   blocks inside the EBB that originally spanned BB..LAST.  HEAD/TAIL
   delimit the scheduled insn stream.  Walks the stream, re-assigning
   head/end for each block at every control-flow insn (or at TAIL),
   inserting or repositioning NOTE_INSN_BASIC_BLOCK notes, and creating
   placeholder notes for blocks left empty (via add_missing_bbs).
   Returns the last basic block of the repaired region.  */
static basic_block
fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
rtx tail)
{
rtx insn = head;
rtx last_inside = BB_HEAD (bb);
rtx aftertail = NEXT_INSN (tail);
head = BB_HEAD (bb);
for (; insn != aftertail; insn = NEXT_INSN (insn))
{
/* Labels never appear inside a scheduled region.  */
gcc_assert (!LABEL_P (insn));
if (inside_basic_block_p (insn))
{
/* First real insn since the previous block boundary: start a new
   block here, making sure it begins with a basic-block note.  */
if (!last_inside)
{
rtx note;
/* NOTE(review): this LABEL_P branch is unreachable given the
   gcc_assert above; kept verbatim.  */
if (LABEL_P (insn))
{
note = emit_note_after (NOTE_INSN_BASIC_BLOCK, insn);
head = insn;
last_inside = note;
}
else
{
note = emit_note_before (NOTE_INSN_BASIC_BLOCK, insn);
head = note;
last_inside = insn;
}
}
else
last_inside = insn;
}
/* A control-flow insn (or the final tail insn of a nonempty block)
   terminates the current block.  */
if (control_flow_insn_p (insn) || (insn == tail && last_inside))
{
basic_block curr_bb = BLOCK_FOR_INSN (insn);
rtx note;
/* A non-jump tail belongs to the last block of the EBB.  */
if (!control_flow_insn_p (insn))
curr_bb = last;
if (bb == last->next_bb)
{
/* The insns were pushed past the end of the EBB: park them in a
   new block split off the fall-through edge, or delete them when
   no fall-through edge exists.  */
edge f;
rtx h;
edge_iterator ei;
FOR_EACH_EDGE (f, ei, bb->prev_bb->succs)
if (f->flags & EDGE_FALLTHRU)
break;
if (f)
{
last = curr_bb = split_edge (f);
h = BB_HEAD (curr_bb);
BB_HEAD (curr_bb) = head;
BB_END (curr_bb) = insn;
/* Replace the temporary head created by split_edge.  */
delete_insn (h);
}
else
{
/* No fall-through: the insns are unreachable, remove them,
   preserving the trailing barrier if any.  */
rtx next = next_nonnote_insn (insn);
delete_insn_chain (head, insn);
if (BARRIER_P (next))
{
emit_barrier_after (prev_nonnote_insn (head));
delete_insn (next);
}
insn = NULL;
}
}
else
{
BB_HEAD (curr_bb) = head;
BB_END (curr_bb) = insn;
/* Blocks between BB and CURR_BB lost all their insns; give them
   placeholder notes.  */
add_missing_bbs (BB_HEAD (curr_bb), bb, curr_bb->prev_bb);
}
note = LABEL_P (head) ? NEXT_INSN (head) : head;
NOTE_BASIC_BLOCK (note) = curr_bb;
update_bb_for_insn (curr_bb);
bb = curr_bb->next_bb;
last_inside = NULL;
if (!insn)
break;
}
}
add_missing_bbs (BB_HEAD (last->next_bb), bb, last);
return bb->prev_bb;
}
/* Search back-dependences of LOAD_INSN for the earliest block that
   contains a load feeding LOAD_INSN's inputs whose own speculative
   class is PFREE_CANDIDATE and that lies outside the blocks chained
   from LAST_BLOCK via bb->aux (the chain built in
   add_deps_for_risky_insns).  Returns that block, or NULL if none.
   ("similiar" misspelling is historical; kept for the callers.)  */
static basic_block
earliest_block_with_similiar_load (basic_block last_block, rtx load_insn)
{
rtx back_link;
basic_block bb, earliest_block = NULL;
for (back_link = LOG_LINKS (load_insn);
back_link;
back_link = XEXP (back_link, 1))
{
rtx insn1 = XEXP (back_link, 0);
/* VOIDmode marks a true (data) dependence.  */
if (GET_MODE (back_link) == VOIDmode)
{
rtx fore_link;
/* Walk forward dependences of the producer to find sibling loads.  */
for (fore_link = INSN_DEPEND (insn1);
fore_link;
fore_link = XEXP (fore_link, 1))
{
rtx insn2 = XEXP (fore_link, 0);
basic_block insn2_block = BLOCK_FOR_INSN (insn2);
if (GET_MODE (fore_link) == VOIDmode)
{
/* Keep only the candidate with the smallest block index.  */
if (earliest_block != NULL
&& earliest_block->index < insn2_block->index)
continue;
if (haifa_classify_insn (insn2) != PFREE_CANDIDATE)
continue;
/* Skip insns inside the bb->aux chain of jump blocks.  */
for (bb = last_block; bb; bb = bb->aux)
if (insn2_block == bb)
break;
if (!bb)
earliest_block = insn2_block;
}
}
}
}
return earliest_block;
}
/* Add anti-dependences that keep "risky" insns (those that may trap or
   fault) from being scheduled above a preceding jump in the EBB
   delimited by HEAD and TAIL.  Jump blocks are temporarily chained
   through bb->aux; the chain is cleared before returning.  */
static void
add_deps_for_risky_insns (rtx head, rtx tail)
{
rtx insn, prev;
int class;
rtx last_jump = NULL_RTX;
rtx next_tail = NEXT_INSN (tail);
basic_block last_block = NULL, bb;
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
if (JUMP_P (insn))
{
/* Remember the most recent jump and chain its block via aux.  */
bb = BLOCK_FOR_INSN (insn);
bb->aux = last_block;
last_block = bb;
last_jump = insn;
}
else if (INSN_P (insn) && last_jump != NULL_RTX)
{
class = haifa_classify_insn (insn);
prev = last_jump;
switch (class)
{
case PFREE_CANDIDATE:
if (flag_schedule_speculative_load)
{
/* A speculation-free load may move above the jump, but no
   higher than a block already containing a similar load.  */
bb = earliest_block_with_similiar_load (last_block, insn);
if (bb)
{
bb = bb->aux;
if (!bb)
break;
/* Anchor the dependence at the end of the block that
   precedes the earliest similar load.  */
prev = BB_END (bb);
}
}
/* Fall through: without -fsched-spec-load (or when anchored
   above) treat like the risky classes below.  */
case TRAP_RISKY:
case IRISKY:
case PRISKY_CANDIDATE:
/* Tie the risky insn below PREV with an anti-dependence.  */
if (add_dependence (insn, prev, REG_DEP_ANTI))
add_forward_dependence (prev, insn, REG_DEP_ANTI);
break;
default:
break;
}
}
/* Unchain the bb->aux links built above.  */
while (last_block)
{
bb = last_block->aux;
last_block->aux = NULL;
last_block = bb;
}
}
/* Schedule a single extended basic block whose insns run from HEAD to
   TAIL inclusive: build dependences, set priorities, run the Haifa
   scheduler, then repair basic-block boundaries.  Returns the last
   basic block of the (possibly restructured) region.  */
static basic_block
schedule_ebb (rtx head, rtx tail)
{
int n_insns;
basic_block b;
struct deps tmp_deps;
basic_block first_bb = BLOCK_FOR_INSN (head);
basic_block last_bb = BLOCK_FOR_INSN (tail);
/* Nothing to schedule in a region of only notes/labels.  */
if (no_real_insns_p (head, tail))
return BLOCK_FOR_INSN (tail);
init_deps_global ();
/* Compute backward dependences inside the region.  */
init_deps (&tmp_deps);
sched_analyze (&tmp_deps, head, tail);
free_deps (&tmp_deps);
/* Compute forward dependences and the extra anti-deps that keep
   trapping insns below preceding jumps.  */
compute_forward_dependences (head, tail);
add_deps_for_risky_insns (head, tail);
if (targetm.sched.dependencies_evaluation_hook)
targetm.sched.dependencies_evaluation_hook (head, tail);
n_insns = set_priorities (head, tail);
/* Publish the (exclusive) region bounds for the scheduler callbacks.  */
current_sched_info->prev_head = PREV_INSN (head);
current_sched_info->next_tail = NEXT_INSN (tail);
if (write_symbols != NO_DEBUG)
{
/* Preserve line-number notes across scheduling.  */
save_line_notes (first_bb->index, head, tail);
rm_line_notes (head, tail);
}
/* Drop stale REG_SAVE_NOTEs left on the first insn by a previous
   scheduling pass.  */
if (INSN_P (head))
{
rtx note;
for (note = REG_NOTES (head); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_SAVE_NOTE)
remove_note (head, note);
}
rm_other_notes (head, tail);
current_sched_info->queue_must_finish_empty = 1;
schedule_block (-1, n_insns);
/* Sanity check: every insn in the region must have been issued.  */
gcc_assert (sched_n_insns == n_insns);
/* The scheduler may have changed the region's head/tail insns.  */
head = current_sched_info->head;
tail = current_sched_info->tail;
if (write_symbols != NO_DEBUG)
restore_line_notes (head, tail);
b = fix_basic_block_boundaries (first_bb, last_bb, head, tail);
finish_deps_global ();
return b;
}
/* Main entry point: schedule the whole function one extended basic
   block at a time, writing dump output to DUMP_FILE.  An EBB is grown
   along fall-through edges whose probability exceeds a tracer-derived
   cutoff, stopping at labels and at the exit block.  */
void
schedule_ebbs (FILE *dump_file)
{
basic_block bb;
int probability_cutoff;
/* Pick the branch-probability cutoff depending on whether real
   profile feedback is available.  */
if (profile_info && flag_branch_probabilities)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY);
probability_cutoff = REG_BR_PROB_BASE / 100 * probability_cutoff;
if (n_basic_blocks == 0)
return;
sched_init (dump_file);
current_sched_info = &ebb_sched_info;
compute_bb_for_insn ();
FOR_EACH_BB (bb)
{
rtx head = BB_HEAD (bb);
rtx tail;
/* Extend the EBB forward along profitable fall-through edges.  */
for (;;)
{
edge e;
edge_iterator ei;
tail = BB_END (bb);
/* Stop at the exit block or at any label (a join point).  */
if (bb->next_bb == EXIT_BLOCK_PTR
|| LABEL_P (BB_HEAD (bb->next_bb)))
break;
FOR_EACH_EDGE (e, ei, bb->succs)
if ((e->flags & EDGE_FALLTHRU) != 0)
break;
/* E is NULL here when the loop above found no fall-through edge
   (FOR_EACH_EDGE leaves it NULL on normal termination).  */
if (! e)
break;
if (e->probability <= probability_cutoff)
break;
bb = bb->next_bb;
}
/* Trim leading/trailing notes and labels from the region.  */
while (head != tail)
{
if (NOTE_P (head))
head = NEXT_INSN (head);
else if (NOTE_P (tail))
tail = PREV_INSN (tail);
else if (LABEL_P (head))
head = NEXT_INSN (head);
else
break;
}
/* Schedule the EBB; continue from the block it returns so the outer
   FOR_EACH_BB skips blocks already consumed.  */
bb = schedule_ebb (head, tail);
}
/* Post-pass fixups.  */
if (reload_completed)
reposition_prologue_and_epilogue_notes (get_insns ());
if (write_symbols != NO_DEBUG)
rm_redundant_line_notes ();
sched_finish ();
}