Commit f8dd7b10 by Jamie Madill

Refactor input layout & vertex signatures.

Always size input layouts to gl::MAX_VERTEX_ATTRIBS, and use '1' bits to signal where we have a GPU conversion. The simplification allows us to more cleanly match the vertex executable signatures and makes our default VertexExecutable hit much more often.

BUG=510151
TEST=angle_end2end_tests, Canary WebGL, manual testing with Chromium

Change-Id: I5009323c4e7e208e7a2595be46658c344517a4ff
Reviewed-on: https://chromium-review.googlesource.com/290740
Reviewed-by: Geoff Lang <geofflang@chromium.org>
Tested-by: Jamie Madill <jmadill@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
parent a840617a
...@@ -414,7 +414,8 @@ std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &s ...@@ -414,7 +414,8 @@ std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &s
if (!shaderAttribute.name.empty()) if (!shaderAttribute.name.empty())
{ {
ASSERT(inputIndex < MAX_VERTEX_ATTRIBS); ASSERT(inputIndex < MAX_VERTEX_ATTRIBS);
VertexFormatType vertexFormatType = inputLayout[inputIndex]; VertexFormatType vertexFormatType =
inputIndex < inputLayout.size() ? inputLayout[inputIndex] : VERTEX_FORMAT_INVALID;
// HLSL code for input structure // HLSL code for input structure
if (IsMatrixType(shaderAttribute.type)) if (IsMatrixType(shaderAttribute.type))
......
...@@ -57,11 +57,14 @@ GLenum GetTextureType(GLenum samplerType) ...@@ -57,11 +57,14 @@ GLenum GetTextureType(GLenum samplerType)
return GL_TEXTURE_2D; return GL_TEXTURE_2D;
} }
void GetDefaultInputLayoutFromShader(const std::vector<sh::Attribute> &shaderAttributes, gl::InputLayout GetDefaultInputLayoutFromShader(const gl::Shader *vertexShader)
gl::InputLayout *inputLayoutOut)
{ {
for (const sh::Attribute &shaderAttr : shaderAttributes) gl::InputLayout defaultLayout(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
const auto &shaderAttributes = vertexShader->getActiveAttributes();
size_t layoutIndex = 0;
for (size_t attribIndex = 0; attribIndex < shaderAttributes.size(); ++attribIndex)
{ {
const sh::Attribute &shaderAttr = shaderAttributes[attribIndex];
if (shaderAttr.type != GL_NONE) if (shaderAttr.type != GL_NONE)
{ {
GLenum transposedType = gl::TransposeMatrixType(shaderAttr.type); GLenum transposedType = gl::TransposeMatrixType(shaderAttr.type);
...@@ -76,10 +79,12 @@ void GetDefaultInputLayoutFromShader(const std::vector<sh::Attribute> &shaderAtt ...@@ -76,10 +79,12 @@ void GetDefaultInputLayoutFromShader(const std::vector<sh::Attribute> &shaderAtt
gl::VertexFormatType defaultType = gl::GetVertexFormatType( gl::VertexFormatType defaultType = gl::GetVertexFormatType(
componentType, GL_FALSE, components, pureInt); componentType, GL_FALSE, components, pureInt);
inputLayoutOut->push_back(defaultType); defaultLayout[layoutIndex++] = defaultType;
} }
} }
} }
return defaultLayout;
} }
std::vector<GLenum> GetDefaultOutputLayoutFromShader(const std::vector<PixelShaderOutputVariable> &shaderOutputVars) std::vector<GLenum> GetDefaultOutputLayoutFromShader(const std::vector<PixelShaderOutputVariable> &shaderOutputVars)
...@@ -145,19 +150,16 @@ void ProgramD3D::VertexExecutable::getSignature(RendererD3D *renderer, ...@@ -145,19 +150,16 @@ void ProgramD3D::VertexExecutable::getSignature(RendererD3D *renderer,
const gl::InputLayout &inputLayout, const gl::InputLayout &inputLayout,
Signature *signatureOut) Signature *signatureOut)
{ {
signatureOut->resize(inputLayout.size(), gl::VERTEX_FORMAT_INVALID); signatureOut->assign(inputLayout.size(), false);
for (size_t index = 0; index < inputLayout.size(); ++index) for (size_t index = 0; index < inputLayout.size(); ++index)
{ {
gl::VertexFormatType vertexFormatType = inputLayout[index]; gl::VertexFormatType vertexFormatType = inputLayout[index];
if (vertexFormatType == gl::VERTEX_FORMAT_INVALID) if (vertexFormatType != gl::VERTEX_FORMAT_INVALID)
{ {
(*signatureOut)[index] = GL_NONE; VertexConversionType conversionType =
} renderer->getVertexConversionType(vertexFormatType);
else (*signatureOut)[index] = ((conversionType & VERTEX_CONVERT_GPU) != 0);
{
bool gpuConverted = ((renderer->getVertexConversionType(vertexFormatType) & VERTEX_CONVERT_GPU) != 0);
(*signatureOut)[index] = (gpuConverted ? GL_TRUE : GL_FALSE);
} }
} }
} }
...@@ -609,11 +611,11 @@ LinkResult ProgramD3D::load(gl::InfoLog &infoLog, gl::BinaryInputStream *stream) ...@@ -609,11 +611,11 @@ LinkResult ProgramD3D::load(gl::InfoLog &infoLog, gl::BinaryInputStream *stream)
for (unsigned int vertexShaderIndex = 0; vertexShaderIndex < vertexShaderCount; vertexShaderIndex++) for (unsigned int vertexShaderIndex = 0; vertexShaderIndex < vertexShaderCount; vertexShaderIndex++)
{ {
size_t inputLayoutSize = stream->readInt<size_t>(); size_t inputLayoutSize = stream->readInt<size_t>();
gl::InputLayout inputLayout; gl::InputLayout inputLayout(inputLayoutSize, gl::VERTEX_FORMAT_INVALID);
for (size_t inputIndex = 0; inputIndex < inputLayoutSize; inputIndex++) for (size_t inputIndex = 0; inputIndex < inputLayoutSize; inputIndex++)
{ {
inputLayout.push_back(stream->readInt<gl::VertexFormatType>()); inputLayout[inputIndex] = stream->readInt<gl::VertexFormatType>();
} }
unsigned int vertexShaderSize = stream->readInt<unsigned int>(); unsigned int vertexShaderSize = stream->readInt<unsigned int>();
...@@ -998,8 +1000,7 @@ LinkResult ProgramD3D::compileProgramExecutables(gl::InfoLog &infoLog, gl::Shade ...@@ -998,8 +1000,7 @@ LinkResult ProgramD3D::compileProgramExecutables(gl::InfoLog &infoLog, gl::Shade
ShaderD3D *vertexShaderD3D = GetImplAs<ShaderD3D>(vertexShader); ShaderD3D *vertexShaderD3D = GetImplAs<ShaderD3D>(vertexShader);
ShaderD3D *fragmentShaderD3D = GetImplAs<ShaderD3D>(fragmentShader); ShaderD3D *fragmentShaderD3D = GetImplAs<ShaderD3D>(fragmentShader);
gl::InputLayout defaultInputLayout; const gl::InputLayout &defaultInputLayout = GetDefaultInputLayoutFromShader(vertexShader);
GetDefaultInputLayoutFromShader(vertexShader->getActiveAttributes(), &defaultInputLayout);
ShaderExecutableD3D *defaultVertexExecutable = NULL; ShaderExecutableD3D *defaultVertexExecutable = NULL;
gl::Error error = getVertexExecutableForInputLayout(defaultInputLayout, &defaultVertexExecutable, &infoLog); gl::Error error = getVertexExecutableForInputLayout(defaultInputLayout, &defaultVertexExecutable, &infoLog);
if (error.isError()) if (error.isError())
...@@ -2059,10 +2060,11 @@ void ProgramD3D::sortAttributesByLayout(const std::vector<TranslatedAttribute> & ...@@ -2059,10 +2060,11 @@ void ProgramD3D::sortAttributesByLayout(const std::vector<TranslatedAttribute> &
void ProgramD3D::updateCachedInputLayout(const gl::Program *program, const gl::State &state) void ProgramD3D::updateCachedInputLayout(const gl::Program *program, const gl::State &state)
{ {
mCachedInputLayout.resize(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID); mCachedInputLayout.assign(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
const int *semanticIndexes = program->getSemanticIndexes(); const int *semanticIndexes = program->getSemanticIndexes();
const auto &vertexAttributes = state.getVertexArray()->getVertexAttributes(); const auto &vertexAttributes = state.getVertexArray()->getVertexAttributes();
for (unsigned int attributeIndex = 0; attributeIndex < vertexAttributes.size(); attributeIndex++) for (unsigned int attributeIndex = 0; attributeIndex < vertexAttributes.size(); attributeIndex++)
{ {
int semanticIndex = semanticIndexes[attributeIndex]; int semanticIndex = semanticIndexes[attributeIndex];
......
...@@ -137,7 +137,7 @@ class ProgramD3D : public ProgramImpl ...@@ -137,7 +137,7 @@ class ProgramD3D : public ProgramImpl
class VertexExecutable class VertexExecutable
{ {
public: public:
typedef std::vector<GLenum> Signature; typedef std::vector<bool> Signature;
VertexExecutable(const gl::InputLayout &inputLayout, VertexExecutable(const gl::InputLayout &inputLayout,
const Signature &signature, const Signature &signature,
......
...@@ -27,10 +27,11 @@ namespace rx ...@@ -27,10 +27,11 @@ namespace rx
namespace namespace
{ {
void GetInputLayout(const TranslatedAttribute *translatedAttributes[gl::MAX_VERTEX_ATTRIBS], gl::InputLayout GetInputLayout(
size_t attributeCount, const TranslatedAttribute *translatedAttributes[gl::MAX_VERTEX_ATTRIBS],
gl::InputLayout *inputLayout) size_t attributeCount)
{ {
gl::InputLayout inputLayout(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
for (size_t attributeIndex = 0; attributeIndex < attributeCount; ++attributeIndex) for (size_t attributeIndex = 0; attributeIndex < attributeCount; ++attributeIndex)
{ {
const TranslatedAttribute *translatedAttribute = translatedAttributes[attributeIndex]; const TranslatedAttribute *translatedAttribute = translatedAttributes[attributeIndex];
...@@ -40,13 +41,10 @@ void GetInputLayout(const TranslatedAttribute *translatedAttributes[gl::MAX_VERT ...@@ -40,13 +41,10 @@ void GetInputLayout(const TranslatedAttribute *translatedAttributes[gl::MAX_VERT
gl::VertexFormatType vertexFormatType = gl::VertexFormatType vertexFormatType =
gl::GetVertexFormatType(*translatedAttribute->attribute, gl::GetVertexFormatType(*translatedAttribute->attribute,
translatedAttribute->currentValueType); translatedAttribute->currentValueType);
inputLayout->push_back(vertexFormatType); inputLayout[attributeIndex] = vertexFormatType;
}
else
{
inputLayout->push_back(gl::VERTEX_FORMAT_INVALID);
} }
} }
return inputLayout;
} }
GLenum GetNextGLSLAttributeType(const sh::Attribute *linkedAttributes, int index) GLenum GetNextGLSLAttributeType(const sh::Attribute *linkedAttributes, int index)
...@@ -327,8 +325,8 @@ gl::Error InputLayoutCache::applyVertexBuffers(const std::vector<TranslatedAttri ...@@ -327,8 +325,8 @@ gl::Error InputLayoutCache::applyVertexBuffers(const std::vector<TranslatedAttri
} }
else else
{ {
gl::InputLayout shaderInputLayout; const gl::InputLayout &shaderInputLayout =
GetInputLayout(sortedAttributes, unsortedAttributes.size(), &shaderInputLayout); GetInputLayout(sortedAttributes, unsortedAttributes.size());
ShaderExecutableD3D *shader = nullptr; ShaderExecutableD3D *shader = nullptr;
gl::Error error = programD3D->getVertexExecutableForInputLayout(shaderInputLayout, &shader, nullptr); gl::Error error = programD3D->getVertexExecutableForInputLayout(shaderInputLayout, &shader, nullptr);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.