// B3StackmapSpecial.cpp
#include "config.h"
#include "B3StackmapSpecial.h"
#if ENABLE(B3_JIT)
#include "AirCode.h"
#include "AirGenerationContext.h"
#include "B3ValueInlines.h"
namespace JSC { namespace B3 {
using Arg = Air::Arg;
using Inst = Air::Inst;
using Tmp = Air::Tmp;
// The constructor has nothing to initialize; the out-of-line defaulted
// definition keeps emission anchored in this translation unit.
StackmapSpecial::StackmapSpecial() = default;
// Nothing to tear down; defaulted out-of-line so the definition stays here.
StackmapSpecial::~StackmapSpecial() = default;
// Folds the registers reported as in-use across this instruction into the
// origin StackmapValue, accumulating over all calls.
void StackmapSpecial::reportUsedRegisters(Inst& inst, const RegisterSet& usedRegisters)
{
    StackmapValue* stackmapValue = inst.origin->as<StackmapValue>();
    ASSERT(stackmapValue);
    stackmapValue->m_usedRegisters.merge(usedRegisters);
}
// Returns the late-clobber set recorded on the instruction's origin
// StackmapValue.
RegisterSet StackmapSpecial::extraClobberedRegs(Inst& inst)
{
    StackmapValue* origin = inst.origin->as<StackmapValue>();
    ASSERT(origin);
    return origin->lateClobbered();
}
// Returns the early-clobber set recorded on the instruction's origin
// StackmapValue.
RegisterSet StackmapSpecial::extraEarlyClobberedRegs(Inst& inst)
{
    StackmapValue* origin = inst.origin->as<StackmapValue>();
    ASSERT(origin);
    return origin->earlyClobbered();
}
// Visits every non-ignored stackmap child's Air argument and hands the
// callback the Arg::Role (plus bank and width) that the register allocator
// should see for it.
//
// numIgnoredB3Args / numIgnoredAirArgs: counts of leading B3 children and Air
//     args that are skipped (e.g. the Special arg itself or fixed operands).
// roleMode: selects how roles are derived; see the switch below.
// firstRecoverableIndex: only consulted for ForceLateUseUnlessRecoverable —
//     the args at this Air index and the next one keep their rep-derived
//     role; presumably those are the ones whose values can be recovered
//     after the patchpoint (TODO confirm against callers).
// optionalDefArgWidth: width of the instruction's def arg, if it has one;
//     uses wider than this are demoted to late uses below.
void StackmapSpecial::forEachArgImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst, RoleMode roleMode, Optional<unsigned> firstRecoverableIndex,
    const ScopedLambda<Inst::EachArgCallback>& callback, Optional<Width> optionalDefArgWidth)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Sanity: there must be at least one Air arg for every non-ignored B3
    // child, and arg 0 must be the Special itself.
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);
    ASSERT(inst.args.size() - numIgnoredAirArgs >= value->children().size() - numIgnoredB3Args);
    ASSERT(inst.args[0].kind() == Arg::Kind::Special);

    for (unsigned i = 0; i < value->children().size() - numIgnoredB3Args; ++i) {
        Arg& arg = inst.args[i + numIgnoredAirArgs];
        ConstrainedValue child = value->constrainedChild(i + numIgnoredB3Args);

        Arg::Role role;
        switch (roleMode) {
        case ForceLateUseUnlessRecoverable:
            ASSERT(firstRecoverableIndex);
            // Anything other than the two "recoverable" args becomes a late
            // cold use; the recoverable pair falls through to SameAsRep.
            if (arg != inst.args[*firstRecoverableIndex] && arg != inst.args[*firstRecoverableIndex + 1]) {
                role = Arg::LateColdUse;
                break;
            }
            FALLTHROUGH;
        case SameAsRep:
            // Derive the role directly from the child's requested
            // representation.
            switch (child.rep().kind()) {
            case ValueRep::WarmAny:
            case ValueRep::SomeRegister:
            case ValueRep::Register:
            case ValueRep::Stack:
            case ValueRep::StackArgument:
            case ValueRep::Constant:
                role = Arg::Use;
                break;
            case ValueRep::SomeRegisterWithClobber:
                role = Arg::UseDef;
                break;
            case ValueRep::LateRegister:
                role = Arg::LateUse;
                break;
            case ValueRep::ColdAny:
                role = Arg::ColdUse;
                break;
            case ValueRep::LateColdAny:
                role = Arg::LateColdUse;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }

            // A use that is wider than the instruction's def arg is demoted
            // to a late use (preserving warm/cold temperature). NOTE(review):
            // presumably this prevents the def from being allocated the same
            // register and clobbering the wide value — confirm. A def this
            // wide would be outright invalid, hence the RELEASE_ASSERT.
            if (!Arg::isLateUse(role) && optionalDefArgWidth && *optionalDefArgWidth < child.value()->resultWidth()) {
                RELEASE_ASSERT(!Arg::isAnyDef(role));

                if (Arg::isWarmUse(role))
                    role = Arg::LateUse;
                else
                    role = Arg::LateColdUse;
            }
            break;
        case ForceLateUse:
            role = Arg::LateColdUse;
            break;
        }

        Type type = child.value()->type();
        callback(arg, role, bankForType(type), widthForType(type));
    }
}
// Validates that the instruction's Air args can stand in for the stackmap's
// B3 children, and that any explicitly requested representations are honored.
bool StackmapSpecial::isValidImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);

    // There must be an Air arg for every non-ignored B3 child.
    unsigned numStackmapChildren = value->children().size() - numIgnoredB3Args;
    if (inst.args.size() - numIgnoredAirArgs < numStackmapChildren)
        return false;

    // Each Air arg must be a form capable of representing its child.
    for (unsigned childIndex = 0; childIndex < numStackmapChildren; ++childIndex) {
        Value* child = value->child(childIndex + numIgnoredB3Args);
        Arg& arg = inst.args[childIndex + numIgnoredAirArgs];
        if (!isArgValidForValue(arg, child))
            return false;
    }

    // Where the stackmap pins a representation, the arg must satisfy it.
    ASSERT(value->m_reps.size() <= value->children().size());
    for (unsigned repIndex = numIgnoredB3Args; repIndex < value->m_reps.size(); ++repIndex) {
        ValueRep& rep = value->m_reps[repIndex];
        Arg& arg = inst.args[repIndex - numIgnoredB3Args + numIgnoredAirArgs];
        if (!isArgValidForRep(code(), arg, rep))
            return false;
    }

    return true;
}
// Answers whether the Air arg at argIndex may live on the stack: only args
// that map to a stackmap child qualify, and only when that child either has
// no pinned representation or an "any" representation.
bool StackmapSpecial::admitsStackImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst, unsigned argIndex)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Map the Air arg index onto the corresponding B3 child index.
    unsigned stackmapArgIndex = argIndex - numIgnoredAirArgs + numIgnoredB3Args;

    // Args past the stackmap's children never admit stack.
    if (stackmapArgIndex >= value->numChildren())
        return false;

    // No explicit rep means the child is unconstrained; otherwise only an
    // "any" rep tolerates a stack slot.
    return stackmapArgIndex >= value->m_reps.size()
        || value->m_reps[stackmapArgIndex].isAny();
}
// Builds, for each non-ignored stackmap child, the ValueRep describing where
// its Air arg ended up after register allocation.
Vector<ValueRep> StackmapSpecial::repsImpl(Air::GenerationContext& context, unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs, Inst& inst)
{
    Vector<ValueRep> reps;
    unsigned numStackmapChildren = inst.origin->numChildren() - numIgnoredB3Args;
    for (unsigned childIndex = 0; childIndex < numStackmapChildren; ++childIndex)
        reps.append(repForArg(*context.code, inst.args[childIndex + numIgnoredAirArgs]));
    return reps;
}
// An arg can stand in for a value if it has an acceptable form — a tmp, an
// immediate, or stack memory — and is wide enough to represent the value.
bool StackmapSpecial::isArgValidForValue(const Air::Arg& arg, Value* value)
{
    const Arg::Kind kind = arg.kind();
    const bool formIsAcceptable =
        kind == Arg::Tmp || kind == Arg::Imm || kind == Arg::BigImm || arg.isStackMemory();
    return formIsAcceptable && arg.canRepresent(value);
}
// Checks that an Air arg satisfies the constraint expressed by a ValueRep.
bool StackmapSpecial::isArgValidForRep(Air::Code& code, const Air::Arg& arg, const ValueRep& rep)
{
    switch (rep.kind()) {
    case ValueRep::WarmAny:
    case ValueRep::ColdAny:
    case ValueRep::LateColdAny:
        // "Any" reps place no constraint on the arg's form.
        return true;
    case ValueRep::SomeRegister:
    case ValueRep::SomeRegisterWithClobber:
    case ValueRep::SomeEarlyRegister:
        return arg.isTmp();
    case ValueRep::LateRegister:
    case ValueRep::Register:
        // Must be exactly the register the rep pinned.
        return arg == Tmp(rep.reg());
    case ValueRep::StackArgument: {
        // The canonical form is a call-arg at the requested SP offset.
        if (arg == Arg::callArg(rep.offsetFromSP()))
            return true;
        // Otherwise accept an equivalent address form, but only once the
        // frame size is known.
        if (!(arg.isAddr() || arg.isExtendedOffsetAddr()) || !code.frameSize())
            return false;
        // FP-relative: the SP offset rebased by the frame size must match.
        if (arg.base() == Tmp(GPRInfo::callFrameRegister)
            && arg.offset() == static_cast<int64_t>(rep.offsetFromSP()) - code.frameSize())
            return true;
        // SP-relative: the offset must match directly.
        return arg.base() == Tmp(MacroAssembler::stackPointerRegister)
            && arg.offset() == rep.offsetFromSP();
    }
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return false;
    }
}
// Converts a post-register-allocation Air arg into the ValueRep the client
// will see in the stackmap.
ValueRep StackmapSpecial::repForArg(Air::Code& code, const Arg& arg)
{
    switch (arg.kind()) {
    case Arg::Tmp:
        return ValueRep::reg(arg.reg());
    case Arg::Imm:
    case Arg::BigImm:
        return ValueRep::constant(arg.value());
    case Arg::ExtendedOffsetAddr:
        ASSERT(arg.base() == Tmp(GPRInfo::callFrameRegister));
        FALLTHROUGH;
    case Arg::Addr:
        // FP-relative offsets are already in stack-slot terms; SP-relative
        // offsets are rebased by subtracting the frame size.
        if (arg.base() == Tmp(GPRInfo::callFrameRegister))
            return ValueRep::stack(arg.offset());
        ASSERT(arg.base() == Tmp(MacroAssembler::stackPointerRegister));
        return ValueRep::stack(arg.offset() - safeCast<Value::OffsetType>(code.frameSize()));
    default:
        ASSERT_NOT_REACHED();
        return ValueRep();
    }
}
} }
namespace WTF {
using namespace JSC::B3;
// Prints the symbolic name of a StackmapSpecial::RoleMode.
void printInternal(PrintStream& out, StackmapSpecial::RoleMode mode)
{
    const char* name = nullptr;
    switch (mode) {
    case StackmapSpecial::SameAsRep:
        name = "SameAsRep";
        break;
    case StackmapSpecial::ForceLateUseUnlessRecoverable:
        name = "ForceLateUseUnlessRecoverable";
        break;
    case StackmapSpecial::ForceLateUse:
        name = "ForceLateUse";
        break;
    }
    if (!name)
        RELEASE_ASSERT_NOT_REACHED();
    out.print(name);
}
}
#endif // ENABLE(B3_JIT)