Commit f8dd7b10 by Jamie Madill

Refactor input layout & vertex signatures.

Always size input layouts to gl::MAX_VERTEX_ATTRIBS, and use '1' bits to signal where we have a GPU conversion. The simplification allows us to more cleanly match the vertex executable signatures and makes our default VertexExecutable hit much more often.

BUG=510151
TEST=angle_end2end_tests, Canary WebGL, manual testing with Chromium
Change-Id: I5009323c4e7e208e7a2595be46658c344517a4ff
Reviewed-on: https://chromium-review.googlesource.com/290740
Reviewed-by: Geoff Lang <geofflang@chromium.org>
Tested-by: Jamie Madill <jmadill@chromium.org>
Reviewed-by: Corentin Wallez <cwallez@chromium.org>
parent a840617a
......@@ -414,7 +414,8 @@ std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &s
if (!shaderAttribute.name.empty())
{
ASSERT(inputIndex < MAX_VERTEX_ATTRIBS);
VertexFormatType vertexFormatType = inputLayout[inputIndex];
VertexFormatType vertexFormatType =
inputIndex < inputLayout.size() ? inputLayout[inputIndex] : VERTEX_FORMAT_INVALID;
// HLSL code for input structure
if (IsMatrixType(shaderAttribute.type))
......
......@@ -57,11 +57,14 @@ GLenum GetTextureType(GLenum samplerType)
return GL_TEXTURE_2D;
}
void GetDefaultInputLayoutFromShader(const std::vector<sh::Attribute> &shaderAttributes,
gl::InputLayout *inputLayoutOut)
gl::InputLayout GetDefaultInputLayoutFromShader(const gl::Shader *vertexShader)
{
for (const sh::Attribute &shaderAttr : shaderAttributes)
gl::InputLayout defaultLayout(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
const auto &shaderAttributes = vertexShader->getActiveAttributes();
size_t layoutIndex = 0;
for (size_t attribIndex = 0; attribIndex < shaderAttributes.size(); ++attribIndex)
{
const sh::Attribute &shaderAttr = shaderAttributes[attribIndex];
if (shaderAttr.type != GL_NONE)
{
GLenum transposedType = gl::TransposeMatrixType(shaderAttr.type);
......@@ -76,10 +79,12 @@ void GetDefaultInputLayoutFromShader(const std::vector<sh::Attribute> &shaderAtt
gl::VertexFormatType defaultType = gl::GetVertexFormatType(
componentType, GL_FALSE, components, pureInt);
inputLayoutOut->push_back(defaultType);
defaultLayout[layoutIndex++] = defaultType;
}
}
}
return defaultLayout;
}
std::vector<GLenum> GetDefaultOutputLayoutFromShader(const std::vector<PixelShaderOutputVariable> &shaderOutputVars)
......@@ -145,19 +150,16 @@ void ProgramD3D::VertexExecutable::getSignature(RendererD3D *renderer,
const gl::InputLayout &inputLayout,
Signature *signatureOut)
{
signatureOut->resize(inputLayout.size(), gl::VERTEX_FORMAT_INVALID);
signatureOut->assign(inputLayout.size(), false);
for (size_t index = 0; index < inputLayout.size(); ++index)
{
gl::VertexFormatType vertexFormatType = inputLayout[index];
if (vertexFormatType == gl::VERTEX_FORMAT_INVALID)
if (vertexFormatType != gl::VERTEX_FORMAT_INVALID)
{
(*signatureOut)[index] = GL_NONE;
}
else
{
bool gpuConverted = ((renderer->getVertexConversionType(vertexFormatType) & VERTEX_CONVERT_GPU) != 0);
(*signatureOut)[index] = (gpuConverted ? GL_TRUE : GL_FALSE);
VertexConversionType conversionType =
renderer->getVertexConversionType(vertexFormatType);
(*signatureOut)[index] = ((conversionType & VERTEX_CONVERT_GPU) != 0);
}
}
}
......@@ -609,11 +611,11 @@ LinkResult ProgramD3D::load(gl::InfoLog &infoLog, gl::BinaryInputStream *stream)
for (unsigned int vertexShaderIndex = 0; vertexShaderIndex < vertexShaderCount; vertexShaderIndex++)
{
size_t inputLayoutSize = stream->readInt<size_t>();
gl::InputLayout inputLayout;
gl::InputLayout inputLayout(inputLayoutSize, gl::VERTEX_FORMAT_INVALID);
for (size_t inputIndex = 0; inputIndex < inputLayoutSize; inputIndex++)
{
inputLayout.push_back(stream->readInt<gl::VertexFormatType>());
inputLayout[inputIndex] = stream->readInt<gl::VertexFormatType>();
}
unsigned int vertexShaderSize = stream->readInt<unsigned int>();
......@@ -998,8 +1000,7 @@ LinkResult ProgramD3D::compileProgramExecutables(gl::InfoLog &infoLog, gl::Shade
ShaderD3D *vertexShaderD3D = GetImplAs<ShaderD3D>(vertexShader);
ShaderD3D *fragmentShaderD3D = GetImplAs<ShaderD3D>(fragmentShader);
gl::InputLayout defaultInputLayout;
GetDefaultInputLayoutFromShader(vertexShader->getActiveAttributes(), &defaultInputLayout);
const gl::InputLayout &defaultInputLayout = GetDefaultInputLayoutFromShader(vertexShader);
ShaderExecutableD3D *defaultVertexExecutable = NULL;
gl::Error error = getVertexExecutableForInputLayout(defaultInputLayout, &defaultVertexExecutable, &infoLog);
if (error.isError())
......@@ -2059,10 +2060,11 @@ void ProgramD3D::sortAttributesByLayout(const std::vector<TranslatedAttribute> &
void ProgramD3D::updateCachedInputLayout(const gl::Program *program, const gl::State &state)
{
mCachedInputLayout.resize(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
mCachedInputLayout.assign(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
const int *semanticIndexes = program->getSemanticIndexes();
const auto &vertexAttributes = state.getVertexArray()->getVertexAttributes();
for (unsigned int attributeIndex = 0; attributeIndex < vertexAttributes.size(); attributeIndex++)
{
int semanticIndex = semanticIndexes[attributeIndex];
......
......@@ -137,7 +137,7 @@ class ProgramD3D : public ProgramImpl
class VertexExecutable
{
public:
typedef std::vector<GLenum> Signature;
typedef std::vector<bool> Signature;
VertexExecutable(const gl::InputLayout &inputLayout,
const Signature &signature,
......
......@@ -27,10 +27,11 @@ namespace rx
namespace
{
void GetInputLayout(const TranslatedAttribute *translatedAttributes[gl::MAX_VERTEX_ATTRIBS],
size_t attributeCount,
gl::InputLayout *inputLayout)
gl::InputLayout GetInputLayout(
const TranslatedAttribute *translatedAttributes[gl::MAX_VERTEX_ATTRIBS],
size_t attributeCount)
{
gl::InputLayout inputLayout(gl::MAX_VERTEX_ATTRIBS, gl::VERTEX_FORMAT_INVALID);
for (size_t attributeIndex = 0; attributeIndex < attributeCount; ++attributeIndex)
{
const TranslatedAttribute *translatedAttribute = translatedAttributes[attributeIndex];
......@@ -40,13 +41,10 @@ void GetInputLayout(const TranslatedAttribute *translatedAttributes[gl::MAX_VERT
gl::VertexFormatType vertexFormatType =
gl::GetVertexFormatType(*translatedAttribute->attribute,
translatedAttribute->currentValueType);
inputLayout->push_back(vertexFormatType);
}
else
{
inputLayout->push_back(gl::VERTEX_FORMAT_INVALID);
inputLayout[attributeIndex] = vertexFormatType;
}
}
return inputLayout;
}
GLenum GetNextGLSLAttributeType(const sh::Attribute *linkedAttributes, int index)
......@@ -327,8 +325,8 @@ gl::Error InputLayoutCache::applyVertexBuffers(const std::vector<TranslatedAttri
}
else
{
gl::InputLayout shaderInputLayout;
GetInputLayout(sortedAttributes, unsortedAttributes.size(), &shaderInputLayout);
const gl::InputLayout &shaderInputLayout =
GetInputLayout(sortedAttributes, unsortedAttributes.size());
ShaderExecutableD3D *shader = nullptr;
gl::Error error = programD3D->getVertexExecutableForInputLayout(shaderInputLayout, &shader, nullptr);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment