Commit 5e5b7537 by Jamie Madill Committed by Commit Bot

Vulkan: Optimize resource tracking in CommandBufferHelper.

Introduces a FastUnorderedMap class that uses FastVector. This type uses static storage for up to a small fixed number of elements and uses dynamic storage for anything larger. Local testing shows this almost fully solves the regression from using unordered_map. It's still slightly slower than using no tracking (<5%). Very degenerate cases which track dozens or more buffers will still have significant overhead. For almost all applications that use only a few buffers per RenderPass this will be very fast. Test: angle_perftests, *vulkan_null_index_buffer_changed_ushort Bug: angleproject:4950 Bug: angleproject:4429 Change-Id: I39edeaaa159124167f1ea23ad2e6eac5e9220d0b Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/2348108 Reviewed-by: Courtney Goeltzenleuchter <courtneygo@google.com> Reviewed-by: Tim Van Patten <timvp@google.com> Commit-Queue: Jamie Madill <jmadill@chromium.org>
parent 398a874c
......@@ -212,9 +212,8 @@ ANGLE_INLINE typename FastVector<T, N, Storage>::reference FastVector<T, N, Stor
}
template <class T, size_t N, class Storage>
ANGLE_INLINE
typename FastVector<T, N, Storage>::const_reference FastVector<T, N, Storage>::operator[](
size_type pos) const
ANGLE_INLINE typename FastVector<T, N, Storage>::const_reference
FastVector<T, N, Storage>::operator[](size_type pos) const
{
ASSERT(pos < mSize);
return mData[pos];
......@@ -427,6 +426,54 @@ void FastVector<T, N, Storage>::ensure_capacity(size_t capacity)
mReservedSize = newSize;
}
}
// FastUnorderedMap: a tiny associative container layered on FastVector.
// Entries live in a flat, unsorted array and are located by linear scan,
// which is intended to be fast for the small element counts the inline
// storage (N) is sized for. Keys must be unique (enforced by an ASSERT
// in insert).
template <class Key, class Value, size_t N>
class FastUnorderedMap final
{
  public:
    using Pair = std::pair<Key, Value>;

    FastUnorderedMap()  = default;
    ~FastUnorderedMap() = default;

    // Adds (key, value). Inserting a key that is already present is invalid.
    void insert(Key key, Value value)
    {
        ASSERT(!contains(key));
        mData.push_back(Pair(key, value));
    }

    // Linear scan for the key; returns whether an entry exists.
    bool contains(Key key) const
    {
        const size_t count = mData.size();
        for (size_t i = 0; i < count; ++i)
        {
            if (mData[i].first == key)
                return true;
        }
        return false;
    }

    // Removes all entries.
    void clear() { mData.clear(); }

    // Looks up the key; on success copies the mapped value into *value and
    // returns true. Returns false (leaving *value untouched) otherwise.
    bool get(Key key, Value *value) const
    {
        const size_t count = mData.size();
        for (size_t i = 0; i < count; ++i)
        {
            const Pair &entry = mData[i];
            if (entry.first == key)
            {
                *value = entry.second;
                return true;
            }
        }
        return false;
    }

    // True when the map holds no entries.
    bool empty() const { return mData.empty(); }

  private:
    FastVector<Pair, N> mData;
};
} // namespace angle
#endif // COMMON_FASTVECTOR_H_
......@@ -233,4 +233,35 @@ TEST(FastVector, NonCopyable)
EXPECT_EQ(1u, copy.size());
EXPECT_EQ(3, copy[0].x);
}
// Exercises FastUnorderedMap insert/contains/get/clear/empty, including
// growth past the inline storage size (N = 3, ten entries inserted).
TEST(FastUnorderedMap, BasicUsage)
{
    FastUnorderedMap<int, bool, 3> map;

    EXPECT_TRUE(map.empty());

    map.insert(5, true);
    EXPECT_TRUE(map.contains(5));

    bool fetched = false;
    EXPECT_TRUE(map.get(5, &fetched));
    EXPECT_TRUE(fetched);
    EXPECT_FALSE(map.get(6, &fetched));

    EXPECT_FALSE(map.empty());
    map.clear();
    EXPECT_TRUE(map.empty());

    // Insert more entries than the inline capacity to cover the dynamic
    // storage path.
    for (int key = 0; key < 10; ++key)
    {
        map.insert(key, false);
    }
    for (int key = 0; key < 10; ++key)
    {
        EXPECT_TRUE(map.contains(key));
        EXPECT_TRUE(map.get(key, &fetched));
        EXPECT_FALSE(fetched);
    }
}
} // namespace angle
......@@ -569,13 +569,17 @@ void CommandBufferHelper::initialize(bool isRenderPassCommandBuffer, bool mergeB
// Returns true if this buffer was tracked (for read or write) in this
// command buffer.
bool CommandBufferHelper::usesBuffer(const BufferHelper &buffer) const
{
    // Note: the pre-change `mUsedBuffers.count(...) > 0` statement that was
    // left in as unreachable dead code has been removed; mUsedBuffers is a
    // FastUnorderedMap, which exposes contains().
    return mUsedBuffers.contains(buffer.getBufferSerial());
}
// Returns true if this buffer was tracked with write access in this
// command buffer.
bool CommandBufferHelper::usesBufferForWrite(const BufferHelper &buffer) const
{
    // Note: the stale `auto iter = mUsedBuffers.find(...)` lines from before
    // the FastUnorderedMap migration have been removed; they made the real
    // logic unreachable and FastUnorderedMap has no find().
    BufferAccess access;
    if (!mUsedBuffers.get(buffer.getBufferSerial(), &access))
    {
        return false;
    }
    return access == BufferAccess::Write;
}
void CommandBufferHelper::bufferRead(ResourceUseList *resourceUseList,
......@@ -591,7 +595,10 @@ void CommandBufferHelper::bufferRead(ResourceUseList *resourceUseList,
}
ASSERT(!usesBufferForWrite(*buffer));
mUsedBuffers[buffer->getBufferSerial()] = BufferAccess::Read;
if (!mUsedBuffers.contains(buffer->getBufferSerial()))
{
mUsedBuffers.insert(buffer->getBufferSerial(), BufferAccess::Read);
}
}
void CommandBufferHelper::bufferWrite(ResourceUseList *resourceUseList,
......@@ -614,7 +621,7 @@ void CommandBufferHelper::bufferWrite(ResourceUseList *resourceUseList,
if (aliasingMode == BufferAliasingMode::Disallowed)
{
ASSERT(!usesBuffer(*buffer));
mUsedBuffers[buffer->getBufferSerial()] = BufferAccess::Write;
mUsedBuffers.insert(buffer->getBufferSerial(), BufferAccess::Write);
}
}
......
......@@ -1011,7 +1011,8 @@ struct CommandBufferHelper : angle::NonCopyable
uint32_t mDepthStencilAttachmentIndex;
// Tracks resources used in the command buffer.
std::unordered_map<BufferSerial, BufferAccess> mUsedBuffers;
static constexpr uint32_t kFastMapSize = 8;
angle::FastUnorderedMap<BufferSerial, BufferAccess, kFastMapSize> mUsedBuffers;
};
static constexpr uint32_t kInvalidAttachmentIndex = -1;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment