Commit d3dfda2b by Jamie Madill

Refactor how we store vertex formats.

Instead of storing a vertex format as a struct with the full info, instead use an enum, and look up the info when we need it. This saves a lot of constructor initialization time, operator comparison time, and storage. It also will allow us to look up D3D format info more quickly. BUG=angleproject:959 Change-Id: I202fd1ea96981073bc1b5b232b1ec3efa91485cb Reviewed-on: https://chromium-review.googlesource.com/277289 Reviewed-by: Geoff Lang <geofflang@chromium.org> Tested-by: Jamie Madill <jmadill@chromium.org>
parent 2810a20c
......@@ -128,110 +128,6 @@ bool ClipRectangle(const Rectangle &source, const Rectangle &clip, Rectangle *in
}
}
// Default-construct an "empty" vertex format: no GL type and zero components.
VertexFormat::VertexFormat()
{
    mType        = GL_NONE;
    mNormalized  = GL_FALSE;
    mComponents  = 0;
    mPureInteger = false;
}
// Construct a vertex format from its individual properties.
// Floating-point types are forced to non-normalized (see below).
VertexFormat::VertexFormat(GLenum type, GLboolean normalized, GLuint components, bool pureInteger)
{
    mType        = type;
    mNormalized  = normalized;
    mComponents  = components;
    mPureInteger = pureInteger;

    // Float data can not be normalized, so ignore the user setting
    switch (mType)
    {
      case GL_FLOAT:
      case GL_HALF_FLOAT:
      case GL_FIXED:
        mNormalized = GL_FALSE;
        break;
      default:
        break;
    }
}
// Construct a vertex format from an enabled vertex attribute.
VertexFormat::VertexFormat(const VertexAttribute &attrib)
    : mType(attrib.type),
      mNormalized(attrib.normalized ? GL_TRUE : GL_FALSE),
      mComponents(attrib.size),
      mPureInteger(attrib.pureInteger)
{
    // A disabled attribute reads from the current value instead; callers must
    // use the (attrib, currentValueType) overload for that case.
    ASSERT(attrib.enabled);

    // Float data can not be normalized, so ignore the user setting
    switch (mType)
    {
      case GL_FLOAT:
      case GL_HALF_FLOAT:
      case GL_FIXED:
        mNormalized = GL_FALSE;
        break;
      default:
        break;
    }
}
// Construct a vertex format from an attribute, falling back to the
// current-value type when the attribute array is disabled.
VertexFormat::VertexFormat(const VertexAttribute &attrib, GLenum currentValueType)
{
    if (attrib.enabled)
    {
        mType        = attrib.type;
        mNormalized  = attrib.normalized ? GL_TRUE : GL_FALSE;
        mComponents  = attrib.size;
        mPureInteger = attrib.pureInteger;
    }
    else
    {
        // Disabled attributes source the current value: always 4 components,
        // never normalized, and pure-integer unless the current value is float.
        mType        = currentValueType;
        mNormalized  = GL_FALSE;
        mComponents  = 4;
        mPureInteger = (currentValueType != GL_FLOAT);
    }

    // Float data can not be normalized, so ignore the user setting
    if (mType == GL_FLOAT || mType == GL_HALF_FLOAT || mType == GL_FIXED)
    {
        mNormalized = GL_FALSE;
    }
}
// Fill inputLayout (indexed by the program's semantic index) with the format
// of each attribute that the program actually consumes.
void VertexFormat::GetInputLayout(VertexFormat *inputLayout,
                                  Program *program,
                                  const State &state)
{
    const std::vector<VertexAttribute> &attribs = state.getVertexArray()->getVertexAttributes();

    for (unsigned int attribIndex = 0; attribIndex < attribs.size(); ++attribIndex)
    {
        const int semanticIndex = program->getSemanticIndex(attribIndex);
        if (semanticIndex == -1)
        {
            // Attribute is unused by the program; leave the slot untouched.
            continue;
        }

        const GLenum currentValueType = state.getVertexAttribCurrentValue(attribIndex).Type;
        inputLayout[semanticIndex] = VertexFormat(attribs[attribIndex], currentValueType);
    }
}
// Two formats are equal only when every property matches.
bool VertexFormat::operator==(const VertexFormat &other) const
{
    if (mType != other.mType)
        return false;
    if (mComponents != other.mComponents)
        return false;
    if (mNormalized != other.mNormalized)
        return false;
    return mPureInteger == other.mPureInteger;
}
// Inequality is defined in terms of equality.
bool VertexFormat::operator!=(const VertexFormat &other) const
{
    const bool equal = (*this == other);
    return !equal;
}
// Strict weak ordering for use as a map/set key: lexicographic comparison of
// (type, normalized, components, pureInteger), in that order.
bool VertexFormat::operator<(const VertexFormat &other) const
{
    if (mType != other.mType)
        return mType < other.mType;

    if (mNormalized != other.mNormalized)
        return mNormalized < other.mNormalized;

    if (mComponents != other.mComponents)
        return mComponents < other.mComponents;

    return mPureInteger < other.mPureInteger;
}
bool Box::operator==(const Box &other) const
{
return (x == other.x && y == other.y && z == other.z &&
......
......@@ -256,27 +256,6 @@ struct PixelPackState
{}
};
// Fully-specified description of a vertex attribute's data format:
// GL component type, normalization flag, component count, and whether the
// data is consumed as pure integers.
struct VertexFormat
{
    GLenum mType;          // GL component type (e.g. GL_FLOAT, GL_BYTE); GL_NONE when default-constructed
    GLboolean mNormalized; // GL_TRUE when the data is normalized; forced to GL_FALSE for float types
    GLuint mComponents;    // number of components per vertex
    bool mPureInteger;     // true for pure-integer vertex formats

    VertexFormat();
    VertexFormat(GLenum type, GLboolean normalized, GLuint components, bool pureInteger);
    explicit VertexFormat(const VertexAttribute &attribute);
    // Uses currentValueType in place of the attribute's format when the attribute is disabled.
    VertexFormat(const VertexAttribute &attribute, GLenum currentValueType);

    // Fills inputLayout (indexed by the program's semantic index) from the
    // program's active attributes and the given state.
    static void GetInputLayout(VertexFormat *inputLayout,
                               Program *program,
                               const State& currentValues);

    bool operator==(const VertexFormat &other) const;
    bool operator!=(const VertexFormat &other) const;
    // Strict weak ordering so VertexFormat can be used as an associative-container key.
    bool operator<(const VertexFormat& other) const;
};
}
namespace rx
......
......@@ -76,6 +76,118 @@ GLenum GetSizedInternalFormat(GLenum internalFormat, GLenum type);
typedef std::set<GLenum> FormatSet;
const FormatSet &GetAllSizedInternalFormats();
// Enumerates every supported vertex format as a single value
// (component type x component count x normalized / pure-integer variant).
// Storing this enum instead of a full format struct makes vertex formats
// cheap to copy, compare, and use as lookup keys.
enum VertexFormatType
{
    VERTEX_FORMAT_INVALID,

    // Normalizable integer formats: <TYPE><componentCount> and a _NORM variant.
    VERTEX_FORMAT_SBYTE1,
    VERTEX_FORMAT_SBYTE1_NORM,
    VERTEX_FORMAT_SBYTE2,
    VERTEX_FORMAT_SBYTE2_NORM,
    VERTEX_FORMAT_SBYTE3,
    VERTEX_FORMAT_SBYTE3_NORM,
    VERTEX_FORMAT_SBYTE4,
    VERTEX_FORMAT_SBYTE4_NORM,
    VERTEX_FORMAT_UBYTE1,
    VERTEX_FORMAT_UBYTE1_NORM,
    VERTEX_FORMAT_UBYTE2,
    VERTEX_FORMAT_UBYTE2_NORM,
    VERTEX_FORMAT_UBYTE3,
    VERTEX_FORMAT_UBYTE3_NORM,
    VERTEX_FORMAT_UBYTE4,
    VERTEX_FORMAT_UBYTE4_NORM,
    VERTEX_FORMAT_SSHORT1,
    VERTEX_FORMAT_SSHORT1_NORM,
    VERTEX_FORMAT_SSHORT2,
    VERTEX_FORMAT_SSHORT2_NORM,
    VERTEX_FORMAT_SSHORT3,
    VERTEX_FORMAT_SSHORT3_NORM,
    VERTEX_FORMAT_SSHORT4,
    VERTEX_FORMAT_SSHORT4_NORM,
    VERTEX_FORMAT_USHORT1,
    VERTEX_FORMAT_USHORT1_NORM,
    VERTEX_FORMAT_USHORT2,
    VERTEX_FORMAT_USHORT2_NORM,
    VERTEX_FORMAT_USHORT3,
    VERTEX_FORMAT_USHORT3_NORM,
    VERTEX_FORMAT_USHORT4,
    VERTEX_FORMAT_USHORT4_NORM,
    VERTEX_FORMAT_SINT1,
    VERTEX_FORMAT_SINT1_NORM,
    VERTEX_FORMAT_SINT2,
    VERTEX_FORMAT_SINT2_NORM,
    VERTEX_FORMAT_SINT3,
    VERTEX_FORMAT_SINT3_NORM,
    VERTEX_FORMAT_SINT4,
    VERTEX_FORMAT_SINT4_NORM,
    VERTEX_FORMAT_UINT1,
    VERTEX_FORMAT_UINT1_NORM,
    VERTEX_FORMAT_UINT2,
    VERTEX_FORMAT_UINT2_NORM,
    VERTEX_FORMAT_UINT3,
    VERTEX_FORMAT_UINT3_NORM,
    VERTEX_FORMAT_UINT4,
    VERTEX_FORMAT_UINT4_NORM,

    // Pure-integer variants (_INT).
    VERTEX_FORMAT_SBYTE1_INT,
    VERTEX_FORMAT_SBYTE2_INT,
    VERTEX_FORMAT_SBYTE3_INT,
    VERTEX_FORMAT_SBYTE4_INT,
    VERTEX_FORMAT_UBYTE1_INT,
    VERTEX_FORMAT_UBYTE2_INT,
    VERTEX_FORMAT_UBYTE3_INT,
    VERTEX_FORMAT_UBYTE4_INT,
    VERTEX_FORMAT_SSHORT1_INT,
    VERTEX_FORMAT_SSHORT2_INT,
    VERTEX_FORMAT_SSHORT3_INT,
    VERTEX_FORMAT_SSHORT4_INT,
    VERTEX_FORMAT_USHORT1_INT,
    VERTEX_FORMAT_USHORT2_INT,
    VERTEX_FORMAT_USHORT3_INT,
    VERTEX_FORMAT_USHORT4_INT,
    VERTEX_FORMAT_SINT1_INT,
    VERTEX_FORMAT_SINT2_INT,
    VERTEX_FORMAT_SINT3_INT,
    VERTEX_FORMAT_SINT4_INT,
    VERTEX_FORMAT_UINT1_INT,
    VERTEX_FORMAT_UINT2_INT,
    VERTEX_FORMAT_UINT3_INT,
    VERTEX_FORMAT_UINT4_INT,

    // Floating-point formats (never normalized).
    VERTEX_FORMAT_FIXED1,
    VERTEX_FORMAT_FIXED2,
    VERTEX_FORMAT_FIXED3,
    VERTEX_FORMAT_FIXED4,
    VERTEX_FORMAT_HALF1,
    VERTEX_FORMAT_HALF2,
    VERTEX_FORMAT_HALF3,
    VERTEX_FORMAT_HALF4,
    VERTEX_FORMAT_FLOAT1,
    VERTEX_FORMAT_FLOAT2,
    VERTEX_FORMAT_FLOAT3,
    VERTEX_FORMAT_FLOAT4,

    // Packed 2.10.10.10 formats.
    VERTEX_FORMAT_SINT210,
    VERTEX_FORMAT_UINT210,
    VERTEX_FORMAT_SINT210_NORM,
    VERTEX_FORMAT_UINT210_NORM,
    VERTEX_FORMAT_SINT210_INT,
    VERTEX_FORMAT_UINT210_INT,
};
// A vertex input layout is just the ordered list of per-attribute format types.
typedef std::vector<gl::VertexFormatType> InputLayout;

// Full (unpacked) description of a vertex format; obtained from a
// VertexFormatType via GetVertexFormatFromType().
struct VertexFormat : angle::NonCopyable
{
    VertexFormat(GLenum typeIn, GLboolean normalizedIn, GLuint componentsIn, bool pureIntegerIn);

    GLenum type;           // GL component type
    GLboolean normalized;  // GL_TRUE when the data is normalized
    GLuint components;     // components per vertex
    bool pureInteger;      // true for pure-integer formats
};

// Map format properties (or a vertex attribute) onto the compact enum.
VertexFormatType GetVertexFormatType(GLenum type, GLboolean normalized, GLuint components, bool pureInteger);
VertexFormatType GetVertexFormatType(const VertexAttribute &attrib);
// Uses currentValueType when the attribute is disabled — TODO confirm against implementation.
VertexFormatType GetVertexFormatType(const VertexAttribute &attrib, GLenum currentValueType);
// Reverse lookup: expand the compact enum back into a full format description.
const VertexFormat &GetVertexFormatFromType(VertexFormatType vertexFormatType);
}
#endif // LIBANGLE_FORMATUTILS_H_
......@@ -380,7 +380,7 @@ std::string DynamicHLSL::generateVaryingHLSL(const ShaderD3D *shader) const
}
std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &sourceShader,
const VertexFormat inputLayout[],
const InputLayout &inputLayout,
const std::vector<sh::Attribute> &shaderAttributes) const
{
std::string structHLSL, initHLSL;
......@@ -414,7 +414,7 @@ std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &s
if (!shaderAttribute.name.empty())
{
ASSERT(inputIndex < MAX_VERTEX_ATTRIBS);
const VertexFormat &vertexFormat = inputLayout[inputIndex];
VertexFormatType vertexFormatType = inputLayout[inputIndex];
// HLSL code for input structure
if (IsMatrixType(shaderAttribute.type))
......@@ -424,7 +424,7 @@ std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &s
}
else
{
GLenum componentType = mRenderer->getVertexComponentType(vertexFormat);
GLenum componentType = mRenderer->getVertexComponentType(vertexFormatType);
if (shaderAttribute.name == "gl_InstanceID")
{
......@@ -458,9 +458,9 @@ std::string DynamicHLSL::generateVertexShaderForInputLayout(const std::string &s
// data reinterpretation (eg for pure integer->float, float->pure integer)
// TODO: issue warning with gl debug info extension, when supported
if (IsMatrixType(shaderAttribute.type) ||
(mRenderer->getVertexConversionType(vertexFormat) & VERTEX_CONVERT_GPU) != 0)
(mRenderer->getVertexConversionType(vertexFormatType) & VERTEX_CONVERT_GPU) != 0)
{
initHLSL += generateAttributeConversionHLSL(vertexFormat, shaderAttribute);
initHLSL += generateAttributeConversionHLSL(vertexFormatType, shaderAttribute);
}
else
{
......@@ -1214,8 +1214,10 @@ std::string DynamicHLSL::decorateVariable(const std::string &name)
return name;
}
std::string DynamicHLSL::generateAttributeConversionHLSL(const VertexFormat &vertexFormat, const sh::ShaderVariable &shaderAttrib) const
std::string DynamicHLSL::generateAttributeConversionHLSL(gl::VertexFormatType vertexFormatType,
const sh::ShaderVariable &shaderAttrib) const
{
const gl::VertexFormat &vertexFormat = gl::GetVertexFormatFromType(vertexFormatType);
std::string attribString = "input." + decorateVariable(shaderAttrib.name);
// Matrix
......@@ -1228,12 +1230,12 @@ std::string DynamicHLSL::generateAttributeConversionHLSL(const VertexFormat &ver
int shaderComponentCount = VariableComponentCount(shaderAttrib.type);
// Perform integer to float conversion (if necessary)
bool requiresTypeConversion = (shaderComponentType == GL_FLOAT && vertexFormat.mType != GL_FLOAT);
bool requiresTypeConversion = (shaderComponentType == GL_FLOAT && vertexFormat.type != GL_FLOAT);
if (requiresTypeConversion)
{
// TODO: normalization for 32-bit integer formats
ASSERT(!vertexFormat.mNormalized && !vertexFormat.mPureInteger);
ASSERT(!vertexFormat.normalized && !vertexFormat.pureInteger);
return "float" + Str(shaderComponentCount) + "(" + attribString + ")";
}
......@@ -1241,22 +1243,4 @@ std::string DynamicHLSL::generateAttributeConversionHLSL(const VertexFormat &ver
return attribString;
}
// Build a per-attribute signature for the input layout: GL_NONE for unused
// slots, otherwise GL_TRUE/GL_FALSE depending on whether the renderer must
// convert the attribute's data on the GPU.
void DynamicHLSL::getInputLayoutSignature(const VertexFormat inputLayout[], GLenum signature[]) const
{
    for (size_t inputIndex = 0; inputIndex < MAX_VERTEX_ATTRIBS; ++inputIndex)
    {
        const VertexFormat &vertexFormat = inputLayout[inputIndex];

        if (vertexFormat.mType == GL_NONE)
        {
            signature[inputIndex] = GL_NONE;
            continue;
        }

        const bool gpuConverted =
            ((mRenderer->getVertexConversionType(vertexFormat) & VERTEX_CONVERT_GPU) != 0);
        signature[inputIndex] = gpuConverted ? GL_TRUE : GL_FALSE;
    }
}
}
......@@ -9,13 +9,13 @@
#ifndef LIBANGLE_RENDERER_D3D_DYNAMICHLSL_H_
#define LIBANGLE_RENDERER_D3D_DYNAMICHLSL_H_
#include "common/angleutils.h"
#include "libANGLE/Constants.h"
#include <map>
#include <vector>
#include "angle_gl.h"
#include <vector>
#include <map>
#include "common/angleutils.h"
#include "libANGLE/Constants.h"
#include "libANGLE/formatutils.h"
namespace sh
{
......@@ -29,7 +29,6 @@ class InfoLog;
struct VariableLocation;
struct LinkedVarying;
struct VertexAttribute;
struct VertexFormat;
struct PackedVarying;
struct Data;
}
......@@ -57,7 +56,7 @@ class DynamicHLSL : angle::NonCopyable
int packVaryings(gl::InfoLog &infoLog, VaryingPacking packing, ShaderD3D *fragmentShader,
ShaderD3D *vertexShader, const std::vector<std::string>& transformFeedbackVaryings);
std::string generateVertexShaderForInputLayout(const std::string &sourceShader,
const gl::VertexFormat inputLayout[],
const gl::InputLayout &inputLayout,
const std::vector<sh::Attribute> &shaderAttributes) const;
std::string generatePixelShaderForOutputSignature(const std::string &sourceShader, const std::vector<PixelShaderOutputVariable> &outputVariables,
bool usesFragDepth, const std::vector<GLenum> &outputLayout) const;
......@@ -72,7 +71,6 @@ class DynamicHLSL : angle::NonCopyable
bool *outUsesFragDepth) const;
std::string generateGeometryShaderHLSL(int registers, ShaderD3D *fragmentShader, ShaderD3D *vertexShader) const;
void getInputLayoutSignature(const gl::VertexFormat inputLayout[], GLenum signature[]) const;
private:
RendererD3D *const mRenderer;
......@@ -92,7 +90,8 @@ class DynamicHLSL : angle::NonCopyable
// Prepend an underscore
static std::string decorateVariable(const std::string &name);
std::string generateAttributeConversionHLSL(const gl::VertexFormat &vertexFormat, const sh::ShaderVariable &shaderAttrib) const;
std::string generateAttributeConversionHLSL(gl::VertexFormatType vertexFormatType,
const sh::ShaderVariable &shaderAttrib) const;
};
}
......
......@@ -9,16 +9,17 @@
#ifndef LIBANGLE_RENDERER_D3D_PROGRAMD3D_H_
#define LIBANGLE_RENDERER_D3D_PROGRAMD3D_H_
#include <string>
#include <vector>
#include "common/Optional.h"
#include "compiler/translator/blocklayoutHLSL.h"
#include "libANGLE/Constants.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/ProgramImpl.h"
#include "libANGLE/renderer/Workarounds.h"
#include "libANGLE/renderer/d3d/DynamicHLSL.h"
#include <string>
#include <vector>
namespace gl
{
struct LinkedUniform;
......@@ -65,7 +66,7 @@ class ProgramD3D : public ProgramImpl
gl::Error getPixelExecutableForFramebuffer(const gl::Framebuffer *fbo, ShaderExecutableD3D **outExectuable);
gl::Error getPixelExecutableForOutputLayout(const std::vector<GLenum> &outputLayout, ShaderExecutableD3D **outExectuable, gl::InfoLog *infoLog);
gl::Error getVertexExecutableForInputLayout(const gl::VertexFormat inputLayout[gl::MAX_VERTEX_ATTRIBS], ShaderExecutableD3D **outExectuable, gl::InfoLog *infoLog);
gl::Error getVertexExecutableForInputLayout(const gl::InputLayout &inputLayout, ShaderExecutableD3D **outExectuable, gl::InfoLog *infoLog);
ShaderExecutableD3D *getGeometryExecutable() const { return mGeometryExecutable; }
LinkResult compileProgramExecutables(gl::InfoLog &infoLog, gl::Shader *fragmentShader, gl::Shader *vertexShader,
......@@ -80,8 +81,6 @@ class ProgramD3D : public ProgramImpl
void bindAttributeLocation(GLuint index, const std::string &name) override;
void getInputLayoutSignature(const gl::VertexFormat inputLayout[], GLenum signature[]) const;
void initializeUniformStorage();
gl::Error applyUniforms();
gl::Error applyUniformBuffers(const gl::Data &data, GLuint uniformBlockBindings[]) override;
......@@ -131,24 +130,32 @@ class ProgramD3D : public ProgramImpl
int sortedSemanticIndicesOut[gl::MAX_VERTEX_ATTRIBS],
const rx::TranslatedAttribute *sortedAttributesOut[gl::MAX_VERTEX_ATTRIBS]) const;
void updateCachedInputLayout(const gl::Program *program, const gl::State &state);
const gl::InputLayout &getCachedInputLayout() const { return mCachedInputLayout; }
private:
class VertexExecutable
{
public:
VertexExecutable(const gl::VertexFormat inputLayout[gl::MAX_VERTEX_ATTRIBS],
const GLenum signature[gl::MAX_VERTEX_ATTRIBS],
typedef std::vector<GLenum> Signature;
VertexExecutable(const gl::InputLayout &inputLayout,
const Signature &signature,
ShaderExecutableD3D *shaderExecutable);
~VertexExecutable();
bool matchesSignature(const GLenum convertedLayout[gl::MAX_VERTEX_ATTRIBS]) const;
bool matchesSignature(const Signature &signature) const;
static void getSignature(RendererD3D *renderer,
const gl::InputLayout &inputLayout,
Signature *signatureOut);
const gl::VertexFormat *inputs() const { return mInputs; }
const GLenum *signature() const { return mSignature; }
const gl::InputLayout &inputs() const { return mInputs; }
const Signature &signature() const { return mSignature; }
ShaderExecutableD3D *shaderExecutable() const { return mShaderExecutable; }
private:
gl::VertexFormat mInputs[gl::MAX_VERTEX_ATTRIBS];
GLenum mSignature[gl::MAX_VERTEX_ATTRIBS];
gl::InputLayout mInputs;
Signature mSignature;
ShaderExecutableD3D *mShaderExecutable;
};
......@@ -243,6 +250,8 @@ class ProgramD3D : public ProgramImpl
std::vector<GLint> mVertexUBOCache;
std::vector<GLint> mFragmentUBOCache;
VertexExecutable::Signature mCachedVertexSignature;
gl::InputLayout mCachedInputLayout;
static unsigned int issueSerial();
static unsigned int mCurrentSerial;
......
......@@ -21,6 +21,7 @@
#include "libANGLE/renderer/d3d/BufferD3D.h"
#include "libANGLE/renderer/d3d/DisplayD3D.h"
#include "libANGLE/renderer/d3d/IndexDataManager.h"
#include "libANGLE/renderer/d3d/ProgramD3D.h"
namespace rx
{
......@@ -31,6 +32,7 @@ namespace
// release and recreate the scratch buffer. This ensures we don't have a
// degenerate case where we are stuck hogging memory.
const int ScratchMemoryBufferLifetime = 1000;
}
const uintptr_t RendererD3D::DirtyPointer = std::numeric_limits<uintptr_t>::max();
......@@ -366,13 +368,11 @@ gl::Error RendererD3D::applyState(const gl::Data &data, GLenum drawMode)
gl::Error RendererD3D::applyShaders(const gl::Data &data)
{
gl::Program *program = data.state->getProgram();
gl::VertexFormat inputLayout[gl::MAX_VERTEX_ATTRIBS];
gl::VertexFormat::GetInputLayout(inputLayout, program, *data.state);
GetImplAs<ProgramD3D>(program)->updateCachedInputLayout(program, *data.state);
const gl::Framebuffer *fbo = data.state->getDrawFramebuffer();
gl::Error error = applyShaders(program, inputLayout, fbo, data.state->getRasterizerState().rasterizerDiscard, data.state->isTransformFeedbackActiveUnpaused());
gl::Error error = applyShaders(program, fbo, data.state->getRasterizerState().rasterizerDiscard, data.state->isTransformFeedbackActiveUnpaused());
if (error.isError())
{
return error;
......
......@@ -12,6 +12,7 @@
#include "common/debug.h"
#include "common/MemoryBuffer.h"
#include "libANGLE/Data.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/Renderer.h"
#include "libANGLE/renderer/d3d/VertexDataManager.h"
#include "libANGLE/renderer/d3d/formatutilsD3D.h"
......@@ -77,8 +78,8 @@ class BufferFactoryD3D
virtual IndexBuffer *createIndexBuffer() = 0;
// TODO(jmadill): add VertexFormatCaps
virtual VertexConversionType getVertexConversionType(const gl::VertexFormat &vertexFormat) const = 0;
virtual GLenum getVertexComponentType(const gl::VertexFormat &vertexFormat) const = 0;
virtual VertexConversionType getVertexConversionType(gl::VertexFormatType vertexFormatType) const = 0;
virtual GLenum getVertexComponentType(gl::VertexFormatType vertexFormatType) const = 0;
};
class RendererD3D : public Renderer, public BufferFactoryD3D
......@@ -130,8 +131,10 @@ class RendererD3D : public Renderer, public BufferFactoryD3D
bool ignoreViewport) = 0;
virtual gl::Error applyRenderTarget(const gl::Framebuffer *frameBuffer) = 0;
virtual gl::Error applyShaders(gl::Program *program, const gl::VertexFormat inputLayout[], const gl::Framebuffer *framebuffer,
bool rasterizerDiscard, bool transformFeedbackActive) = 0;
virtual gl::Error applyShaders(gl::Program *program,
const gl::Framebuffer *framebuffer,
bool rasterizerDiscard,
bool transformFeedbackActive) = 0;
virtual gl::Error applyUniforms(const ProgramImpl &program, const std::vector<gl::LinkedUniform*> &uniformArray) = 0;
virtual bool applyPrimitiveType(GLenum primitiveType, GLsizei elementCount, bool usesPointSize) = 0;
virtual gl::Error applyVertexBuffer(const gl::State &state, GLenum mode, GLint first, GLsizei count, GLsizei instances, SourceIndexData *sourceIndexInfo) = 0;
......
......@@ -188,14 +188,14 @@ bool VertexBufferInterface::directStoragePossible(const gl::VertexAttribute &att
if (attrib.type != GL_FLOAT)
{
gl::VertexFormat vertexFormat(attrib, currentValueType);
gl::VertexFormatType vertexFormatType = gl::GetVertexFormatType(attrib, currentValueType);
unsigned int outputElementSize;
getVertexBuffer()->getSpaceRequired(attrib, 1, 0, &outputElementSize);
alignment = std::min<size_t>(outputElementSize, 4);
// TODO(jmadill): add VertexFormatCaps
requiresConversion = (mFactory->getVertexConversionType(vertexFormat) & VERTEX_CONVERT_CPU) != 0;
requiresConversion = (mFactory->getVertexConversionType(vertexFormatType) & VERTEX_CONVERT_CPU) != 0;
}
bool isAligned = (static_cast<size_t>(ComputeVertexAttributeStride(attrib)) % alignment == 0) &&
......
......@@ -28,16 +28,22 @@ namespace
void GetInputLayout(const TranslatedAttribute *translatedAttributes[gl::MAX_VERTEX_ATTRIBS],
size_t attributeCount,
gl::VertexFormat inputLayout[gl::MAX_VERTEX_ATTRIBS])
gl::InputLayout *inputLayout)
{
for (size_t attributeIndex = 0; attributeIndex < attributeCount; ++attributeIndex)
{
const TranslatedAttribute *translatedAttribute = translatedAttributes[attributeIndex];
if (translatedAttributes[attributeIndex]->active)
if (translatedAttribute->active)
{
inputLayout[attributeIndex] = gl::VertexFormat(*translatedAttribute->attribute,
translatedAttribute->currentValueType);
gl::VertexFormatType vertexFormatType =
gl::GetVertexFormatType(*translatedAttribute->attribute,
translatedAttribute->currentValueType);
inputLayout->push_back(vertexFormatType);
}
else
{
inputLayout->push_back(gl::VERTEX_FORMAT_INVALID);
}
}
}
......@@ -167,8 +173,8 @@ gl::Error InputLayoutCache::applyVertexBuffers(const std::vector<TranslatedAttri
// If rendering points and instanced pointsprite emulation is being used, the inputClass is required to be configured as per instance data
inputClass = instancedPointSpritesActive ? D3D11_INPUT_PER_INSTANCE_DATA : inputClass;
gl::VertexFormat vertexFormat(*sortedAttributes[i]->attribute, sortedAttributes[i]->currentValueType);
const d3d11::VertexFormat &vertexFormatInfo = d3d11::GetVertexFormatInfo(vertexFormat, mFeatureLevel);
gl::VertexFormatType vertexFormatType = gl::GetVertexFormatType(*sortedAttributes[i]->attribute, sortedAttributes[i]->currentValueType);
const d3d11::VertexFormat &vertexFormatInfo = d3d11::GetVertexFormatInfo(vertexFormatType, mFeatureLevel);
// Record the type of the associated vertex shader vector in our key
// This will prevent mismatched vertex shaders from using the same input layout
......@@ -280,8 +286,8 @@ gl::Error InputLayoutCache::applyVertexBuffers(const std::vector<TranslatedAttri
}
else
{
gl::VertexFormat shaderInputLayout[gl::MAX_VERTEX_ATTRIBS];
GetInputLayout(sortedAttributes, unsortedAttributes.size(), shaderInputLayout);
gl::InputLayout shaderInputLayout;
GetInputLayout(sortedAttributes, unsortedAttributes.size(), &shaderInputLayout);
ShaderExecutableD3D *shader = nullptr;
gl::Error error = programD3D->getVertexExecutableForInputLayout(shaderInputLayout, &shader, nullptr);
......
......@@ -2000,10 +2000,13 @@ gl::Error Renderer11::drawTriangleFan(GLsizei count, GLenum type, const GLvoid *
return gl::Error(GL_NO_ERROR);
}
gl::Error Renderer11::applyShaders(gl::Program *program, const gl::VertexFormat inputLayout[], const gl::Framebuffer *framebuffer,
bool rasterizerDiscard, bool transformFeedbackActive)
gl::Error Renderer11::applyShaders(gl::Program *program,
const gl::Framebuffer *framebuffer,
bool rasterizerDiscard,
bool transformFeedbackActive)
{
ProgramD3D *programD3D = GetImplAs<ProgramD3D>(program);
const auto &inputLayout = programD3D->getCachedInputLayout();
ShaderExecutableD3D *vertexExe = NULL;
gl::Error error = programD3D->getVertexExecutableForInputLayout(inputLayout, &vertexExe, nullptr);
......@@ -3732,14 +3735,14 @@ bool Renderer11::getLUID(LUID *adapterLuid) const
return true;
}
VertexConversionType Renderer11::getVertexConversionType(const gl::VertexFormat &vertexFormat) const
VertexConversionType Renderer11::getVertexConversionType(gl::VertexFormatType vertexFormatType) const
{
return d3d11::GetVertexFormatInfo(vertexFormat, mRenderer11DeviceCaps.featureLevel).conversionType;
return d3d11::GetVertexFormatInfo(vertexFormatType, mRenderer11DeviceCaps.featureLevel).conversionType;
}
GLenum Renderer11::getVertexComponentType(const gl::VertexFormat &vertexFormat) const
GLenum Renderer11::getVertexComponentType(gl::VertexFormatType vertexFormatType) const
{
return d3d11::GetDXGIFormatInfo(d3d11::GetVertexFormatInfo(vertexFormat, mRenderer11DeviceCaps.featureLevel).nativeFormat).componentType;
return d3d11::GetDXGIFormatInfo(d3d11::GetVertexFormatInfo(vertexFormatType, mRenderer11DeviceCaps.featureLevel).nativeFormat).componentType;
}
void Renderer11::generateCaps(gl::Caps *outCaps, gl::TextureCapsMap *outTextureCaps, gl::Extensions *outExtensions) const
......
......@@ -130,8 +130,10 @@ class Renderer11 : public RendererD3D
virtual bool applyPrimitiveType(GLenum mode, GLsizei count, bool usesPointSize);
gl::Error applyRenderTarget(const gl::Framebuffer *frameBuffer) override;
virtual gl::Error applyShaders(gl::Program *program, const gl::VertexFormat inputLayout[], const gl::Framebuffer *framebuffer,
bool rasterizerDiscard, bool transformFeedbackActive);
gl::Error applyShaders(gl::Program *program,
const gl::Framebuffer *framebuffer,
bool rasterizerDiscard,
bool transformFeedbackActive) override;
virtual gl::Error applyUniforms(const ProgramImpl &program, const std::vector<gl::LinkedUniform*> &uniformArray);
virtual gl::Error applyVertexBuffer(const gl::State &state, GLenum mode, GLint first, GLsizei count, GLsizei instances, SourceIndexData *sourceIndexInfo);
......@@ -248,8 +250,8 @@ class Renderer11 : public RendererD3D
gl::Error packPixels(ID3D11Texture2D *readTexture, const PackPixelsParams &params, uint8_t *pixelsOut);
bool getLUID(LUID *adapterLuid) const override;
virtual VertexConversionType getVertexConversionType(const gl::VertexFormat &vertexFormat) const;
virtual GLenum getVertexComponentType(const gl::VertexFormat &vertexFormat) const;
VertexConversionType getVertexConversionType(gl::VertexFormatType vertexFormatType) const override;
GLenum getVertexComponentType(gl::VertexFormatType vertexFormatType) const override;
gl::Error readTextureData(ID3D11Texture2D *texture, unsigned int subResource, const gl::Rectangle &area, GLenum format,
GLenum type, GLuint outputPitch, const gl::PixelPackState &pack, uint8_t *pixels);
......
......@@ -10,6 +10,7 @@
#include "libANGLE/Buffer.h"
#include "libANGLE/VertexAttribute.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/d3d/d3d11/Buffer11.h"
#include "libANGLE/renderer/d3d/d3d11/formatutils11.h"
#include "libANGLE/renderer/d3d/d3d11/Renderer11.h"
......@@ -134,9 +135,9 @@ gl::Error VertexBuffer11::storeVertexAttributes(const gl::VertexAttribute &attri
input += inputStride * start;
}
gl::VertexFormat vertexFormat(attrib, currentValueType);
gl::VertexFormatType vertexFormatType = gl::GetVertexFormatType(attrib, currentValueType);
const D3D_FEATURE_LEVEL featureLevel = mRenderer->getRenderer11DeviceCaps().featureLevel;
const d3d11::VertexFormat &vertexFormatInfo = d3d11::GetVertexFormatInfo(vertexFormat, featureLevel);
const d3d11::VertexFormat &vertexFormatInfo = d3d11::GetVertexFormatInfo(vertexFormatType, featureLevel);
ASSERT(vertexFormatInfo.copyFunction != NULL);
vertexFormatInfo.copyFunction(input, inputStride, count, output);
......@@ -159,9 +160,9 @@ gl::Error VertexBuffer11::getSpaceRequired(const gl::VertexAttribute &attrib, GL
elementCount = UnsignedCeilDivide(static_cast<unsigned int>(instances), attrib.divisor);
}
gl::VertexFormat vertexFormat(attrib);
gl::VertexFormatType formatType = gl::GetVertexFormatType(attrib);
const D3D_FEATURE_LEVEL featureLevel = mRenderer->getRenderer11DeviceCaps().featureLevel;
const d3d11::VertexFormat &vertexFormatInfo = d3d11::GetVertexFormatInfo(vertexFormat, featureLevel);
const d3d11::VertexFormat &vertexFormatInfo = d3d11::GetVertexFormatInfo(formatType, featureLevel);
const d3d11::DXGIFormat &dxgiFormatInfo = d3d11::GetDXGIFormatInfo(vertexFormatInfo.nativeFormat);
unsigned int elementSize = dxgiFormatInfo.pixelBytes;
if (elementSize <= std::numeric_limits<unsigned int>::max() / elementCount)
......
......@@ -1099,8 +1099,8 @@ const TextureFormat &GetTextureFormatInfo(GLenum internalFormat, const Renderer1
return defaultInfo;
}
typedef std::map<gl::VertexFormat, VertexFormat> D3D11VertexFormatInfoMap;
typedef std::pair<gl::VertexFormat, VertexFormat> D3D11VertexFormatPair;
typedef std::map<gl::VertexFormatType, VertexFormat> D3D11VertexFormatInfoMap;
typedef std::pair<gl::VertexFormatType, VertexFormat> D3D11VertexFormatPair;
VertexFormat::VertexFormat()
: conversionType(VERTEX_CONVERT_NONE),
......@@ -1109,30 +1109,39 @@ VertexFormat::VertexFormat()
{
}
static void AddVertexFormatInfo(D3D11VertexFormatInfoMap *map, GLenum inputType, GLboolean normalized, GLuint componentCount,
VertexConversionType conversionType, DXGI_FORMAT nativeFormat, VertexCopyFunction copyFunction)
static void AddVertexFormatInfo(D3D11VertexFormatInfoMap *map,
GLenum inputType,
GLboolean normalized,
GLuint componentCount,
VertexConversionType conversionType,
DXGI_FORMAT nativeFormat,
VertexCopyFunction copyFunction)
{
gl::VertexFormat inputFormat(inputType, normalized, componentCount, false);
gl::VertexFormatType formatType = gl::GetVertexFormatType(inputType, normalized, componentCount, false);
VertexFormat info;
info.conversionType = conversionType;
info.nativeFormat = nativeFormat;
info.copyFunction = copyFunction;
map->insert(D3D11VertexFormatPair(inputFormat, info));
map->insert(D3D11VertexFormatPair(formatType, info));
}
static void AddIntegerVertexFormatInfo(D3D11VertexFormatInfoMap *map, GLenum inputType, GLuint componentCount,
VertexConversionType conversionType, DXGI_FORMAT nativeFormat, VertexCopyFunction copyFunction)
static void AddIntegerVertexFormatInfo(D3D11VertexFormatInfoMap *map,
GLenum inputType,
GLuint componentCount,
VertexConversionType conversionType,
DXGI_FORMAT nativeFormat,
VertexCopyFunction copyFunction)
{
gl::VertexFormat inputFormat(inputType, GL_FALSE, componentCount, true);
gl::VertexFormatType formatType = gl::GetVertexFormatType(inputType, GL_FALSE, componentCount, true);
VertexFormat info;
info.conversionType = conversionType;
info.nativeFormat = nativeFormat;
info.copyFunction = copyFunction;
map->insert(D3D11VertexFormatPair(inputFormat, info));
map->insert(D3D11VertexFormatPair(formatType, info));
}
static D3D11VertexFormatInfoMap BuildD3D11_FL9_3VertexFormatInfoOverrideMap()
......@@ -1354,7 +1363,7 @@ static D3D11VertexFormatInfoMap BuildD3D11VertexFormatInfoMap()
return map;
}
const VertexFormat &GetVertexFormatInfo(const gl::VertexFormat &vertexFormat, D3D_FEATURE_LEVEL featureLevel)
const VertexFormat &GetVertexFormatInfo(gl::VertexFormatType vertexFormatType, D3D_FEATURE_LEVEL featureLevel)
{
static const D3D11VertexFormatInfoMap vertexFormatMap = BuildD3D11VertexFormatInfoMap();
static const D3D11VertexFormatInfoMap vertexFormatMapFL9_3Override = BuildD3D11_FL9_3VertexFormatInfoOverrideMap();
......@@ -1362,14 +1371,14 @@ const VertexFormat &GetVertexFormatInfo(const gl::VertexFormat &vertexFormat, D3
if (featureLevel == D3D_FEATURE_LEVEL_9_3)
{
// First see if the format has a special mapping for FL9_3
D3D11VertexFormatInfoMap::const_iterator iter = vertexFormatMapFL9_3Override.find(vertexFormat);
D3D11VertexFormatInfoMap::const_iterator iter = vertexFormatMapFL9_3Override.find(vertexFormatType);
if (iter != vertexFormatMapFL9_3Override.end())
{
return iter->second;
}
}
D3D11VertexFormatInfoMap::const_iterator iter = vertexFormatMap.find(vertexFormat);
D3D11VertexFormatInfoMap::const_iterator iter = vertexFormatMap.find(vertexFormatType);
if (iter != vertexFormatMap.end())
{
return iter->second;
......
......@@ -10,12 +10,12 @@
#ifndef LIBANGLE_RENDERER_D3D_D3D11_FORMATUTILS11_H_
#define LIBANGLE_RENDERER_D3D_D3D11_FORMATUTILS11_H_
#include "libANGLE/renderer/d3d/formatutilsD3D.h"
#include "libANGLE/angletypes.h"
#include <map>
#include "common/platform.h"
#include <map>
#include "libANGLE/angletypes.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/d3d/formatutilsD3D.h"
namespace rx
{
......@@ -89,7 +89,7 @@ struct VertexFormat
DXGI_FORMAT nativeFormat;
VertexCopyFunction copyFunction;
};
const VertexFormat &GetVertexFormatInfo(const gl::VertexFormat &vertexFormat, D3D_FEATURE_LEVEL featureLevel);
const VertexFormat &GetVertexFormatInfo(gl::VertexFormatType vertexFormatType, D3D_FEATURE_LEVEL featureLevel);
}
......
......@@ -1824,13 +1824,16 @@ gl::Error Renderer9::getCountingIB(size_t count, StaticIndexBufferInterface **ou
return gl::Error(GL_NO_ERROR);
}
gl::Error Renderer9::applyShaders(gl::Program *program, const gl::VertexFormat inputLayout[], const gl::Framebuffer *framebuffer,
bool rasterizerDiscard, bool transformFeedbackActive)
gl::Error Renderer9::applyShaders(gl::Program *program,
const gl::Framebuffer *framebuffer,
bool rasterizerDiscard,
bool transformFeedbackActive)
{
ASSERT(!transformFeedbackActive);
ASSERT(!rasterizerDiscard);
ProgramD3D *programD3D = GetImplAs<ProgramD3D>(program);
const auto &inputLayout = programD3D->getCachedInputLayout();
ShaderExecutableD3D *vertexExe = NULL;
gl::Error error = programD3D->getVertexExecutableForInputLayout(inputLayout, &vertexExe, nullptr);
......@@ -2927,14 +2930,14 @@ bool Renderer9::getLUID(LUID *adapterLuid) const
return false;
}
VertexConversionType Renderer9::getVertexConversionType(const gl::VertexFormat &vertexFormat) const
VertexConversionType Renderer9::getVertexConversionType(gl::VertexFormatType vertexFormatType) const
{
return d3d9::GetVertexFormatInfo(getCapsDeclTypes(), vertexFormat).conversionType;
return d3d9::GetVertexFormatInfo(getCapsDeclTypes(), vertexFormatType).conversionType;
}
GLenum Renderer9::getVertexComponentType(const gl::VertexFormat &vertexFormat) const
GLenum Renderer9::getVertexComponentType(gl::VertexFormatType vertexFormatType) const
{
return d3d9::GetVertexFormatInfo(getCapsDeclTypes(), vertexFormat).componentType;
return d3d9::GetVertexFormatInfo(getCapsDeclTypes(), vertexFormatType).componentType;
}
void Renderer9::generateCaps(gl::Caps *outCaps, gl::TextureCapsMap *outTextureCaps, gl::Extensions *outExtensions) const
......
......@@ -106,8 +106,10 @@ class Renderer9 : public RendererD3D
gl::Error applyRenderTarget(const gl::Framebuffer *frameBuffer) override;
gl::Error applyRenderTarget(const gl::FramebufferAttachment *colorAttachment,
const gl::FramebufferAttachment *depthStencilAttachment);
virtual gl::Error applyShaders(gl::Program *program, const gl::VertexFormat inputLayout[], const gl::Framebuffer *framebuffer,
bool rasterizerDiscard, bool transformFeedbackActive);
gl::Error applyShaders(gl::Program *program,
const gl::Framebuffer *framebuffer,
bool rasterizerDiscard,
bool transformFeedbackActive) override;
virtual gl::Error applyUniforms(const ProgramImpl &program, const std::vector<gl::LinkedUniform*> &uniformArray);
virtual bool applyPrimitiveType(GLenum primitiveType, GLsizei elementCount, bool usesPointSize);
virtual gl::Error applyVertexBuffer(const gl::State &state, GLenum mode, GLint first, GLsizei count, GLsizei instances, SourceIndexData *sourceInfo);
......@@ -225,8 +227,8 @@ class Renderer9 : public RendererD3D
D3DPOOL getTexturePool(DWORD usage) const;
bool getLUID(LUID *adapterLuid) const override;
virtual VertexConversionType getVertexConversionType(const gl::VertexFormat &vertexFormat) const;
virtual GLenum getVertexComponentType(const gl::VertexFormat &vertexFormat) const;
VertexConversionType getVertexConversionType(gl::VertexFormatType vertexFormatType) const override;
GLenum getVertexComponentType(gl::VertexFormatType vertexFormatType) const override;
gl::Error copyToRenderTarget(IDirect3DSurface9 *dest, IDirect3DSurface9 *source, bool fromManaged);
......
......@@ -96,8 +96,8 @@ gl::Error VertexBuffer9::storeVertexAttributes(const gl::VertexAttribute &attrib
input += inputStride * start;
}
gl::VertexFormat vertexFormat(attrib, currentValueType);
const d3d9::VertexFormat &d3dVertexInfo = d3d9::GetVertexFormatInfo(mRenderer->getCapsDeclTypes(), vertexFormat);
gl::VertexFormatType vertexFormatType = gl::GetVertexFormatType(attrib, currentValueType);
const d3d9::VertexFormat &d3dVertexInfo = d3d9::GetVertexFormatInfo(mRenderer->getCapsDeclTypes(), vertexFormatType);
bool needsConversion = (d3dVertexInfo.conversionType & VERTEX_CONVERT_CPU) > 0;
if (!needsConversion && inputStride == elementSize)
......@@ -171,8 +171,8 @@ IDirect3DVertexBuffer9 * VertexBuffer9::getBuffer() const
gl::Error VertexBuffer9::spaceRequired(const gl::VertexAttribute &attrib, std::size_t count, GLsizei instances,
unsigned int *outSpaceRequired) const
{
gl::VertexFormat vertexFormat(attrib, GL_FLOAT);
const d3d9::VertexFormat &d3d9VertexInfo = d3d9::GetVertexFormatInfo(mRenderer->getCapsDeclTypes(), vertexFormat);
gl::VertexFormatType vertexFormatType = gl::GetVertexFormatType(attrib, GL_FLOAT);
const d3d9::VertexFormat &d3d9VertexInfo = d3d9::GetVertexFormatInfo(mRenderer->getCapsDeclTypes(), vertexFormatType);
if (attrib.enabled)
{
......
......@@ -7,10 +7,12 @@
// VertexDeclarationCache.cpp: Implements a helper class to construct and cache vertex declarations.
#include "libANGLE/renderer/d3d/d3d9/VertexDeclarationCache.h"
#include "libANGLE/renderer/d3d/d3d9/VertexBuffer9.h"
#include "libANGLE/renderer/d3d/d3d9/formatutils9.h"
#include "libANGLE/Program.h"
#include "libANGLE/VertexAttribute.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/d3d/d3d9/VertexBuffer9.h"
#include "libANGLE/renderer/d3d/d3d9/formatutils9.h"
namespace rx
{
......@@ -154,8 +156,8 @@ gl::Error VertexDeclarationCache::applyDeclaration(IDirect3DDevice9 *device,
mAppliedVBs[stream].offset = attributes[i].offset;
}
gl::VertexFormat vertexFormat(*attributes[i].attribute, GL_FLOAT);
const d3d9::VertexFormat &d3d9VertexInfo = d3d9::GetVertexFormatInfo(caps.DeclTypes, vertexFormat);
gl::VertexFormatType vertexformatType = gl::GetVertexFormatType(*attributes[i].attribute, GL_FLOAT);
const d3d9::VertexFormat &d3d9VertexInfo = d3d9::GetVertexFormatInfo(caps.DeclTypes, vertexformatType);
element->Stream = static_cast<WORD>(stream);
element->Offset = 0;
......
......@@ -555,7 +555,7 @@ static inline unsigned int ComputeTypeIndex(GLenum type)
}
}
const VertexFormat &GetVertexFormatInfo(DWORD supportedDeclTypes, const gl::VertexFormat &vertexFormat)
const VertexFormat &GetVertexFormatInfo(DWORD supportedDeclTypes, gl::VertexFormatType vertexFormatType)
{
static bool initialized = false;
static DWORD initializedDeclTypes = 0;
......@@ -592,9 +592,11 @@ const VertexFormat &GetVertexFormatInfo(DWORD supportedDeclTypes, const gl::Vert
initializedDeclTypes = supportedDeclTypes;
}
const gl::VertexFormat &vertexFormat = gl::GetVertexFormatFromType(vertexFormatType);
// Pure integer attributes only supported in ES3.0
ASSERT(!vertexFormat.mPureInteger);
return formatConverters[ComputeTypeIndex(vertexFormat.mType)][vertexFormat.mNormalized][vertexFormat.mComponents - 1];
ASSERT(!vertexFormat.pureInteger);
return formatConverters[ComputeTypeIndex(vertexFormat.type)][vertexFormat.normalized][vertexFormat.components - 1];
}
}
......
......@@ -10,12 +10,12 @@
#ifndef LIBANGLE_RENDERER_D3D_D3D9_FORMATUTILS9_H_
#define LIBANGLE_RENDERER_D3D_D3D9_FORMATUTILS9_H_
#include "libANGLE/renderer/d3d/formatutilsD3D.h"
#include "libANGLE/angletypes.h"
#include <map>
#include "common/platform.h"
#include <map>
#include "libANGLE/angletypes.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/d3d/formatutilsD3D.h"
namespace rx
{
......@@ -64,7 +64,7 @@ struct VertexFormat
D3DDECLTYPE nativeFormat;
GLenum componentType;
};
const VertexFormat &GetVertexFormatInfo(DWORD supportedDeclTypes, const gl::VertexFormat &vertexFormat);
const VertexFormat &GetVertexFormatInfo(DWORD supportedDeclTypes, gl::VertexFormatType vertexFormatType);
struct TextureFormat
{
......
......@@ -54,8 +54,8 @@ class MockBufferFactoryD3D : public rx::BufferFactoryD3D
}
MOCK_METHOD0(createVertexBuffer, rx::VertexBuffer*());
MOCK_CONST_METHOD1(getVertexConversionType, rx::VertexConversionType(const gl::VertexFormat &));
MOCK_CONST_METHOD1(getVertexComponentType, GLenum(const gl::VertexFormat &));
MOCK_CONST_METHOD1(getVertexConversionType, rx::VertexConversionType(gl::VertexFormatType));
MOCK_CONST_METHOD1(getVertexComponentType, GLenum(gl::VertexFormatType));
// Dependency injection
rx::IndexBuffer* createIndexBuffer() override
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment