Commit 8772202b by Nicolas Capens Committed by Nicolas Capens

Replace only loads matching the store type.

If a load uses the same address as a previous store, we can only replace it with the store's data when their types match. Also, stores can only be eliminated when all the loads following them have been replaced. Bug: swiftshader:48 Change-Id: I5968b256cb295243c30df3598e49015bccb5bff1 Reviewed-on: https://swiftshader-review.googlesource.com/10950 Tested-by: Nicolas Capens <nicolascapens@google.com> Reviewed-by: Alexis Hétu <sugoi@google.com> Reviewed-by: Nicolas Capens <nicolascapens@google.com>
parent f2f5e962
......@@ -46,6 +46,7 @@ namespace
static Ice::Operand *loadAddress(const Ice::Inst *instruction);
static Ice::Operand *storeData(const Ice::Inst *instruction);
static std::size_t storeSize(const Ice::Inst *instruction);
static bool loadTypeMatchesStore(const Ice::Inst *load, const Ice::Inst *store);
Ice::Cfg *function;
Ice::GlobalContext *context;
......@@ -202,6 +203,11 @@ namespace
continue;
}
if(!loadTypeMatchesStore(load, store))
{
continue;
}
replace(load, storeValue);
for(size_t i = 0; i < addressUses.loads.size(); i++)
......@@ -298,6 +304,7 @@ namespace
auto &insts = singleBasicBlock->getInsts();
Ice::Inst *store = nullptr;
Ice::Operand *storeValue = nullptr;
bool unmatchedLoads = false;
for(Ice::Inst &inst : insts)
{
......@@ -314,7 +321,7 @@ namespace
}
// New store found. If we had a previous one, try to eliminate it.
if(store)
if(store && !unmatchedLoads)
{
// If the previous store is wider than the new one, we can't eliminate it
// because there could be a wide load reading its non-overwritten data.
......@@ -326,6 +333,7 @@ namespace
store = &inst;
storeValue = storeData(store);
unmatchedLoads = false;
}
else if(isLoad(inst))
{
......@@ -336,10 +344,13 @@ namespace
continue;
}
if(storeValue)
if(!loadTypeMatchesStore(load, store))
{
replace(load, storeValue);
unmatchedLoads = true;
continue;
}
replace(load, storeValue);
}
}
}
......@@ -586,6 +597,38 @@ namespace
return 0;
}
bool Optimizer::loadTypeMatchesStore(const Ice::Inst *load, const Ice::Inst *store)
{
	// Determines whether a load reading from a previously stored address can be
	// replaced by the store's data: the types (and, for sub-vector accesses, the
	// sub-vector widths) must match exactly. Returns false otherwise.
	if(!load || !store)
	{
		return false;
	}

	assert(isLoad(*load) && isStore(*store));
	assert(loadAddress(load) == storeAddress(store));

	const auto *instStore = llvm::dyn_cast<Ice::InstStore>(store);
	const auto *instLoad = llvm::dyn_cast<Ice::InstLoad>(load);

	// Plain load/store pair: compare the stored data's type against the
	// loaded destination's type.
	if(instStore && instLoad)
	{
		return instStore->getData()->getType() == instLoad->getDest()->getType();
	}

	const auto *storeSubVector = asStoreSubVector(store);
	const auto *loadSubVector = asLoadSubVector(load);

	// Sub-vector load/store pair: the element type and the sub-vector width
	// (an integer constant operand) must both agree.
	if(storeSubVector && loadSubVector)
	{
		const bool sameType =
			storeSubVector->getSrc(1)->getType() == loadSubVector->getDest()->getType();
		const bool sameWidth =
			llvm::cast<Ice::ConstantInteger32>(storeSubVector->getSrc(3))->getValue() ==
			llvm::cast<Ice::ConstantInteger32>(loadSubVector->getSrc(2))->getValue();

		return sameType && sameWidth;
	}

	// Mismatched instruction kinds (e.g. plain store vs. sub-vector load).
	return false;
}
bool Optimizer::Uses::areOnlyLoadStore() const
{
return size() == (loads.size() + stores.size());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment