Commit ac2388c3 by John Porto

Subzero. X86. Refactors Address Mode formation.

Refactors the Address Mode optimization interface.

BUG=
R=stichnot@chromium.org
Review URL: https://codereview.chromium.org/1605103002 .
parent 6b80cf10
......@@ -270,7 +270,9 @@ TargetX8664Traits::Address TargetX8664Traits::X86OperandMem::toAsmAddress(
const bool NeedSandboxing = Target->needSandboxing();
(void)NeedSandboxing;
assert(!NeedSandboxing || IsLeaAddr ||
(getBase()->getRegNum() == Traits::RegisterSet::Reg_r15));
(getBase()->getRegNum() == Traits::RegisterSet::Reg_r15) ||
(getBase()->getRegNum() == Traits::RegisterSet::Reg_rsp) ||
(getBase()->getRegNum() == Traits::RegisterSet::Reg_rbp));
return X8664::Traits::Address(getEncodedGPR(getBase()->getRegNum()),
getEncodedGPR(getIndex()->getRegNum()),
X8664::Traits::ScaleFactor(getShift()), Disp,
......
......@@ -245,8 +245,21 @@ void TargetLowering::staticInit(GlobalContext *Ctx) {
}
}
TargetLowering::SandboxType
TargetLowering::determineSandboxTypeFromFlags(const ClFlags &Flags) {
  // Sandboxing (NaCl) and Non-SFI are mutually exclusive: the flags must
  // never request both at once.
  assert(!Flags.getUseSandboxing() || !Flags.getUseNonsfi());
  if (Flags.getUseNonsfi())
    return TargetLowering::ST_Nonsfi;
  return Flags.getUseSandboxing() ? TargetLowering::ST_NaCl
                                  : TargetLowering::ST_None;
}
TargetLowering::TargetLowering(Cfg *Func)
: Func(Func), Ctx(Func->getContext()), Context() {}
: Func(Func), Ctx(Func->getContext()),
SandboxingType(determineSandboxTypeFromFlags(Ctx->getFlags())) {}
TargetLowering::AutoBundle::AutoBundle(TargetLowering *Target,
InstBundleLock::Option Option)
......
......@@ -455,6 +455,15 @@ protected:
bool shouldOptimizeMemIntrins();
/// SandboxType enumerates all possible sandboxing strategies that a target
/// may be configured with (see determineSandboxTypeFromFlags).
enum SandboxType {
  ST_None,   // No sandboxing.
  ST_NaCl,   // Native Client (SFI) sandboxing (--sandboxing flag).
  ST_Nonsfi, // Non-SFI sandboxing (--nonsfi flag).
};
/// Maps the command-line flags to a SandboxType. Asserts that sandboxing
/// and Non-SFI are not both requested at once.
static SandboxType determineSandboxTypeFromFlags(const ClFlags &Flags);
Cfg *Func;
GlobalContext *Ctx;
bool HasComputedFrame = false;
......@@ -462,6 +471,7 @@ protected:
SizeT NextLabelNumber = 0;
SizeT NextJumpTableNumber = 0;
LoweringContext Context;
const SandboxType SandboxingType = ST_None;
// Runtime helper function names
const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
......
......@@ -274,7 +274,7 @@ std::array<uint32_t, NumVec128Args> Vec128ArgInitializer;
} // end of anonymous namespace
TargetARM32::TargetARM32(Cfg *Func)
: TargetLowering(Func), NeedSandboxing(Ctx->getFlags().getUseSandboxing()),
: TargetLowering(Func), NeedSandboxing(SandboxingType == ST_NaCl),
CPUFeatures(Func->getContext()->getFlags()) {}
void TargetARM32::staticInit(GlobalContext *Ctx) {
......
......@@ -137,6 +137,45 @@ void TargetX8632::_mov_sp(Operand *NewValue) {
_redefined(_mov(esp, NewValue));
}
Traits::X86OperandMem *TargetX8632::_sandbox_mem_reference(X86OperandMem *Mem) {
  // Rewrites a memory reference for the current sandboxing model. On x86-32
  // only Non-SFI mode requires rewriting; every other mode returns Mem as-is.
  switch (SandboxingType) {
  case ST_None:
  case ST_NaCl:
    return Mem;
  case ST_Nonsfi: {
    if (Mem->getIsRebased()) {
      // Already rewritten relative to the RebasePtr.
      return Mem;
    }
    // For Non-SFI mode, if the Offset field is a ConstantRelocatable, we
    // replace either Base or Index with a legalized RebasePtr. At emission
    // time, the ConstantRelocatable will be emitted with the @GOTOFF
    // relocation.
    const auto *Reloc =
        llvm::dyn_cast_or_null<ConstantRelocatable>(Mem->getOffset());
    if (Reloc == nullptr) {
      return Mem;
    }
    // Exactly one of Base/Index may be populated: the free slot is where the
    // RebasePtr goes.
    Variable *BaseOrIndex;
    uint16_t ShiftAmt = 0;
    if (Mem->getIndex() == nullptr) {
      BaseOrIndex = Mem->getBase();
    } else if (Mem->getBase() == nullptr) {
      BaseOrIndex = Mem->getIndex();
      ShiftAmt = Mem->getShift();
    } else {
      llvm::report_fatal_error(
          "Either Base or Index must be unused in Non-SFI mode");
    }
    Variable *RebasePtrR = legalizeToReg(RebasePtr);
    static constexpr bool IsRebased = true;
    return Traits::X86OperandMem::create(
        Func, Mem->getType(), RebasePtrR, Mem->getOffset(), BaseOrIndex,
        ShiftAmt, Traits::X86OperandMem::DefaultSegment, IsRebased);
  }
  }
  llvm::report_fatal_error("Unhandled sandboxing type: " +
                           std::to_string(SandboxingType));
}
void TargetX8632::_sub_sp(Operand *Adjustment) {
Variable *esp = getPhysicalRegister(Traits::RegisterSet::Reg_esp);
_sub(esp, Adjustment);
......@@ -215,6 +254,47 @@ void TargetX8632::lowerIndirectJump(Variable *JumpTarget) {
_jmp(JumpTarget);
}
void TargetX8632::initRebasePtr() {
  // Only Non-SFI mode needs a rebasing pointer on x86-32; otherwise leave
  // RebasePtr unset.
  if (SandboxingType != ST_Nonsfi) {
    return;
  }
  RebasePtr = Func->makeVariable(IceType_i32);
}
void TargetX8632::initSandbox() {
  // Nothing to emit unless we are lowering for Non-SFI mode.
  if (SandboxingType != ST_Nonsfi)
    return;
  // Insert the RebasePtr assignment as the very first lowered instruction.
  // Later, it will be moved into the right place - after the stack frame is
  // set up but before in-args are copied into registers.
  Context.init(Func->getEntryNode());
  Context.setInsertPoint(Context.getCur());
  Context.insert<Traits::Insts::GetIP>(RebasePtr);
}
bool TargetX8632::legalizeOptAddrForSandbox(OptAddr *Addr) {
  // Nothing to legalize unless we are in Non-SFI mode and the address
  // carries a relocatable offset (which must become RebasePtr-relative).
  if (SandboxingType != ST_Nonsfi || Addr->Relocatable == nullptr) {
    return true;
  }
  // Address already references the RebasePtr: nothing more to do.
  if (Addr->Base == RebasePtr || Addr->Index == RebasePtr) {
    return true;
  }
  // Fold the RebasePtr into whichever of Base/Index is free.
  if (Addr->Base == nullptr) {
    Addr->Base = RebasePtr;
    return true;
  }
  if (Addr->Index == nullptr) {
    Addr->Index = RebasePtr;
    Addr->Shift = 0;
    return true;
  }
  // Both slots are occupied: this address cannot be legalized in place.
  return false;
}
Inst *TargetX8632::emitCallToTarget(Operand *CallTarget, Variable *ReturnReg) {
std::unique_ptr<AutoBundle> Bundle;
if (NeedSandboxing) {
......
......@@ -48,15 +48,15 @@ public:
protected:
void _add_sp(Operand *Adjustment);
void _mov_sp(Operand *NewValue);
Traits::X86OperandMem *_sandbox_mem_reference(X86OperandMem *) {
llvm::report_fatal_error("sandbox mem reference for x86-32.");
}
Traits::X86OperandMem *_sandbox_mem_reference(X86OperandMem *Mem);
void _sub_sp(Operand *Adjustment);
void _link_bp();
void _unlink_bp();
void _push_reg(Variable *Reg);
void initSandbox() {}
void initRebasePtr();
void initSandbox();
bool legalizeOptAddrForSandbox(OptAddr *Addr);
void emitSandboxedReturn();
void lowerIndirectJump(Variable *JumpTarget);
void emitGetIP(CfgNode *Node);
......
......@@ -298,33 +298,48 @@ Traits::X86OperandMem *TargetX8664::_sandbox_mem_reference(X86OperandMem *Mem) {
// In x86_64-nacl, all memory references are relative to %r15 (i.e., %rzp.)
// NaCl sandboxing also requires that any registers that are not %rsp and
// %rbp to be 'truncated' to 32-bit before memory access.
assert(NeedSandboxing);
if (SandboxingType == ST_None) {
return Mem;
}
if (SandboxingType == ST_Nonsfi) {
llvm::report_fatal_error(
"_sandbox_mem_reference not implemented for nonsfi");
}
Variable *Base = Mem->getBase();
Variable *Index = Mem->getIndex();
uint16_t Shift = 0;
Variable *r15 =
Variable *ZeroReg =
getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64);
Constant *Offset = Mem->getOffset();
Variable *T = nullptr;
if (Mem->getIsRebased()) {
// If Mem.IsRebased, then we don't need to update Mem to contain a reference
// to %r15, but we still need to truncate Mem.Index (if any) to 32-bit.
assert(r15 == Base);
T = Index;
Shift = Mem->getShift();
} else if (Base != nullptr && Index != nullptr) {
// Another approach could be to emit an
//
// lea Mem, %T
//
// And then update Mem.Base = r15, Mem.Index = T, Mem.Shift = 0
llvm::report_fatal_error("memory reference contains base and index.");
} else if (Base != nullptr) {
T = Base;
} else if (Index != nullptr) {
T = Index;
// to a valid base register (%r15, %rsp, or %rbp), but we still need to
// truncate Mem.Index (if any) to 32-bit.
assert(ZeroReg == Base || Base->isRematerializable());
T = makeReg(IceType_i32);
_mov(T, Index);
Shift = Mem->getShift();
} else {
if (Base != nullptr) {
if (Base->isRematerializable()) {
ZeroReg = Base;
} else {
T = Base;
}
}
if (Index != nullptr) {
assert(!Index->isRematerializable());
if (T != nullptr) {
llvm::report_fatal_error("memory reference contains base and index.");
}
T = Index;
Shift = Mem->getShift();
}
}
// NeedsLea is a flags indicating whether Mem needs to be materialized to a
......@@ -399,7 +414,7 @@ Traits::X86OperandMem *TargetX8664::_sandbox_mem_reference(X86OperandMem *Mem) {
static constexpr bool IsRebased = true;
return Traits::X86OperandMem::create(
Func, Mem->getType(), r15, Offset, T, Shift,
Func, Mem->getType(), ZeroReg, Offset, T, Shift,
Traits::X86OperandMem::DefaultSegment, IsRebased);
}
......@@ -427,8 +442,23 @@ void TargetX8664::_sub_sp(Operand *Adjustment) {
_add(rsp, r15);
}
void TargetX8664::initRebasePtr() {
  switch (SandboxingType) {
  case ST_None:
    // nothing.
    break;
  case ST_NaCl:
    // NaCl x86-64 rebases all memory references off %r15.
    RebasePtr = getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64);
    break;
  case ST_Nonsfi:
    // Probably no implementation is needed, but error to be safe for now.
    llvm::report_fatal_error(
        "initRebasePtr() is not yet implemented on x32-nonsfi.");
  }
}
void TargetX8664::initSandbox() {
assert(NeedSandboxing);
assert(SandboxingType == ST_NaCl);
Context.init(Func->getEntryNode());
Context.setInsertPoint(Context.getCur());
Variable *r15 =
......@@ -437,6 +467,45 @@ void TargetX8664::initSandbox() {
Context.insert<InstFakeUse>(r15);
}
namespace {
/// Null-tolerant wrapper around Variable::isRematerializable().
bool isRematerializable(const Variable *Var) {
  if (Var == nullptr)
    return false;
  return Var->isRematerializable();
}
} // end of anonymous namespace
bool TargetX8664::legalizeOptAddrForSandbox(OptAddr *Addr) {
  if (SandboxingType == ST_Nonsfi) {
    llvm::report_fatal_error("Nonsfi not yet implemented for x8664.");
  }
  // A rematerializable Base already anchors the address; a RebasePtr in the
  // other slot is redundant and can be dropped.
  if (isRematerializable(Addr->Base)) {
    if (Addr->Index == RebasePtr) {
      Addr->Index = nullptr;
      Addr->Shift = 0;
    }
    return true;
  }
  // Symmetric case: rematerializable Index, redundant RebasePtr in Base.
  if (isRematerializable(Addr->Index)) {
    if (Addr->Base == RebasePtr) {
      Addr->Base = nullptr;
    }
    return true;
  }
  assert(Addr->Base != RebasePtr && Addr->Index != RebasePtr);
  // Legalizable as long as at least one of Base/Index is still free.
  return Addr->Base == nullptr || Addr->Index == nullptr;
}
void TargetX8664::lowerIndirectJump(Variable *JumpTarget) {
std::unique_ptr<AutoBundle> Bundler;
......
......@@ -55,7 +55,9 @@ protected:
void _unlink_bp();
void _push_reg(Variable *Reg);
void initRebasePtr();
void initSandbox();
bool legalizeOptAddrForSandbox(OptAddr *Addr);
void emitSandboxedReturn();
void lowerIndirectJump(Variable *JumpTarget);
void emitGetIP(CfgNode *Node);
......
......@@ -194,7 +194,17 @@ protected:
void postLower() override;
/// Initializes the RebasePtr member variable -- if so required by
/// SandboxingType for the concrete Target.
void initRebasePtr() {
  // A rebasing pointer only exists under some sandboxing scheme; invoking
  // this with ST_None is a caller bug.
  assert(SandboxingType != ST_None);
  dispatchToConcrete(&Traits::ConcreteTarget::initRebasePtr);
}
/// Emit code that initializes the value of the RebasePtr near the start of
/// the function -- if so required by SandboxingType for the concrete type.
void initSandbox() {
  // Only meaningful when some sandboxing scheme is active.
  assert(SandboxingType != ST_None);
  dispatchToConcrete(&Traits::ConcreteTarget::initSandbox);
}
......@@ -225,6 +235,25 @@ protected:
Type ReturnType);
uint32_t getCallStackArgumentsSizeBytes(const InstCall *Instr) override;
void genTargetHelperCallFor(Inst *Instr) override;
/// OptAddr wraps all the possible operands that an x86 address might have:
/// <Relocatable + Offset>(Base, Index, Shift).
struct OptAddr {
  Variable *Base = nullptr;  // Base register, or nullptr if absent.
  Variable *Index = nullptr; // Index register, or nullptr if absent.
  uint16_t Shift = 0;        // Scale (shift amount) applied to Index.
  int32_t Offset = 0;        // Constant displacement.
  // Symbolic displacement, if any; nullptr otherwise.
  ConstantRelocatable *Relocatable = nullptr;
};
/// Legalizes Addr w.r.t. SandboxingType. The exact type of legalization
/// varies for different <Target, SandboxingType> tuples. Returns the
/// concrete target's verdict (true when Addr is, or has been made, legal).
bool legalizeOptAddrForSandbox(OptAddr *Addr) {
  // Note: std::move on a raw pointer is a no-op copy, so pass it directly.
  return dispatchToConcrete(&Traits::ConcreteTarget::legalizeOptAddrForSandbox,
                            Addr);
}
// Builds information for a canonical address expresion:
// <Relocatable + Offset>(Base, Index, Shift)
X86OperandMem *computeAddressOpt(const Inst *Instr, Type MemType,
Operand *Addr);
void doAddressOptLoad() override;
void doAddressOptStore() override;
void doMockBoundsCheck(Operand *Opnd) override;
......@@ -322,7 +351,7 @@ protected:
Legal_Imm = 1 << 1,
Legal_Mem = 1 << 2, // includes [eax+4*ecx] as well as [esp+12]
Legal_Rematerializable = 1 << 3,
Legal_AddrAbs = 1 << 4, // ConstantRelocatable doesn't have to add GotVar
Legal_AddrAbs = 1 << 4, // ConstantRelocatable doesn't have to add RebasePtr
Legal_Default = ~(Legal_Rematerializable | Legal_AddrAbs)
// TODO(stichnot): Figure out whether this default works for x86-64.
};
......@@ -410,11 +439,9 @@ protected:
template <typename... T>
AutoMemorySandboxer(typename Traits::TargetLowering *Target, T... Args)
: Target(Target),
MemOperand(
(!Traits::Is64Bit || !Target->Ctx->getFlags().getUseSandboxing())
? nullptr
: findMemoryReference(Args...)) {
: Target(Target), MemOperand(Target->SandboxingType == ST_None
? nullptr
: findMemoryReference(Args...)) {
if (MemOperand != nullptr) {
Bundler = makeUnique<AutoBundle>(Target, BundleLockOpt);
*MemOperand = Target->_sandbox_mem_reference(*MemOperand);
......@@ -932,9 +959,9 @@ protected:
RegisterAliases;
llvm::SmallBitVector RegsUsed;
std::array<VarList, IceType_NUM> PhysicalRegisters;
// GotVar is a Variable that holds the GlobalOffsetTable address for Non-SFI
// mode.
Variable *GotVar = nullptr;
// RebasePtr is a Variable that holds the Rebasing pointer (if any) for the
// current sandboxing type.
Variable *RebasePtr = nullptr;
/// Randomize a given immediate operand
Operand *randomizeOrPoolImmediate(Constant *Immediate,
......@@ -1002,10 +1029,6 @@ private:
/// Optimizations for idiom recognition.
bool lowerOptimizeFcmpSelect(const InstFcmp *Fcmp, const InstSelect *Select);
/// Emit code that initializes the value of the GotVar near the start of the
/// function. (This code is emitted only in Non-SFI mode.)
void initGotVarIfNeeded();
/// Complains loudly if invoked because the cpu can handle 64-bit types
/// natively.
template <typename T = Traits>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.