Commit eae262e7 by Shahbaz Youssefi Committed by Commit Bot

Vulkan: Fix image layout barriers for tessellation shaders

Also fixes a bug where invalid stages may be specified, for example if AllGraphicsReadOnly or DepthStencilReadOnly layouts are used and geometry or tessellation shaders are not supported by the implementation. Bug: angleproject:5557 Change-Id: Ia25a6aec8138c67701c63da65783263d8a7bda27 Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/2653911 Commit-Queue: Shahbaz Youssefi <syoussefi@chromium.org> Reviewed-by: Jamie Madill <jmadill@chromium.org> Reviewed-by: Tim Van Patten <timvp@google.com>
parent 6d86a0fe
...@@ -121,14 +121,18 @@ GLenum DefaultGLErrorCode(VkResult result) ...@@ -121,14 +121,18 @@ GLenum DefaultGLErrorCode(VkResult result)
constexpr gl::ShaderMap<vk::ImageLayout> kShaderReadOnlyImageLayouts = { constexpr gl::ShaderMap<vk::ImageLayout> kShaderReadOnlyImageLayouts = {
{gl::ShaderType::Vertex, vk::ImageLayout::VertexShaderReadOnly}, {gl::ShaderType::Vertex, vk::ImageLayout::VertexShaderReadOnly},
{gl::ShaderType::TessControl, vk::ImageLayout::PreFragmentShadersReadOnly},
{gl::ShaderType::TessEvaluation, vk::ImageLayout::PreFragmentShadersReadOnly},
{gl::ShaderType::Geometry, vk::ImageLayout::PreFragmentShadersReadOnly},
{gl::ShaderType::Fragment, vk::ImageLayout::FragmentShaderReadOnly}, {gl::ShaderType::Fragment, vk::ImageLayout::FragmentShaderReadOnly},
{gl::ShaderType::Geometry, vk::ImageLayout::GeometryShaderReadOnly},
{gl::ShaderType::Compute, vk::ImageLayout::ComputeShaderReadOnly}}; {gl::ShaderType::Compute, vk::ImageLayout::ComputeShaderReadOnly}};
constexpr gl::ShaderMap<vk::ImageLayout> kShaderWriteImageLayouts = { constexpr gl::ShaderMap<vk::ImageLayout> kShaderWriteImageLayouts = {
{gl::ShaderType::Vertex, vk::ImageLayout::VertexShaderWrite}, {gl::ShaderType::Vertex, vk::ImageLayout::VertexShaderWrite},
{gl::ShaderType::TessControl, vk::ImageLayout::PreFragmentShadersWrite},
{gl::ShaderType::TessEvaluation, vk::ImageLayout::PreFragmentShadersWrite},
{gl::ShaderType::Geometry, vk::ImageLayout::PreFragmentShadersWrite},
{gl::ShaderType::Fragment, vk::ImageLayout::FragmentShaderWrite}, {gl::ShaderType::Fragment, vk::ImageLayout::FragmentShaderWrite},
{gl::ShaderType::Geometry, vk::ImageLayout::GeometryShaderWrite},
{gl::ShaderType::Compute, vk::ImageLayout::ComputeShaderWrite}}; {gl::ShaderType::Compute, vk::ImageLayout::ComputeShaderWrite}};
constexpr VkBufferUsageFlags kVertexBufferUsage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT; constexpr VkBufferUsageFlags kVertexBufferUsage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
...@@ -1266,12 +1270,18 @@ ANGLE_INLINE angle::Result ContextVk::handleDirtyTexturesImpl( ...@@ -1266,12 +1270,18 @@ ANGLE_INLINE angle::Result ContextVk::handleDirtyTexturesImpl(
executable->getSamplerShaderBitsForTextureUnitIndex(textureUnit); executable->getSamplerShaderBitsForTextureUnitIndex(textureUnit);
ASSERT(remainingShaderBits.any()); ASSERT(remainingShaderBits.any());
gl::ShaderType firstShader = remainingShaderBits.first(); gl::ShaderType firstShader = remainingShaderBits.first();
gl::ShaderType lastShader = remainingShaderBits.last();
remainingShaderBits.reset(firstShader); remainingShaderBits.reset(firstShader);
// If we have multiple shader accessing it, we barrier against all shader stage read remainingShaderBits.reset(lastShader);
// given that we only support vertex/frag shaders // We barrier against either:
if (remainingShaderBits.any()) // - Vertex only
// - Fragment only
// - Pre-fragment only (vertex, geometry and tessellation together)
if (remainingShaderBits.any() || firstShader != lastShader)
{ {
textureLayout = vk::ImageLayout::AllGraphicsShadersReadOnly; textureLayout = lastShader == gl::ShaderType::Fragment
? vk::ImageLayout::AllGraphicsShadersReadOnly
: vk::ImageLayout::PreFragmentShadersReadOnly;
} }
else else
{ {
...@@ -1279,8 +1289,7 @@ ANGLE_INLINE angle::Result ContextVk::handleDirtyTexturesImpl( ...@@ -1279,8 +1289,7 @@ ANGLE_INLINE angle::Result ContextVk::handleDirtyTexturesImpl(
} }
} }
// Ensure the image is in read-only layout // Ensure the image is in read-only layout
commandBufferHelper->imageRead(&mResourceUseList, image.getAspectFlags(), textureLayout, commandBufferHelper->imageRead(this, image.getAspectFlags(), textureLayout, &image);
&image);
textureVk->retainImageViews(&mResourceUseList); textureVk->retainImageViews(&mResourceUseList);
} }
...@@ -4284,16 +4293,23 @@ angle::Result ContextVk::updateActiveImages(const gl::Context *context, ...@@ -4284,16 +4293,23 @@ angle::Result ContextVk::updateActiveImages(const gl::Context *context,
alreadyProcessed.insert(image); alreadyProcessed.insert(image);
vk::ImageLayout imageLayout; vk::ImageLayout imageLayout;
gl::ShaderType shader = static_cast<gl::ShaderType>(gl::ScanForward(shaderStages.bits())); gl::ShaderType firstShader = shaderStages.first();
shaderStages.reset(shader); gl::ShaderType lastShader = shaderStages.last();
// This is accessed by multiple shaders shaderStages.reset(firstShader);
if (shaderStages.any()) shaderStages.reset(lastShader);
// We barrier against either:
// - Vertex only
// - Fragment only
// - Pre-fragment only (vertex, geometry and tessellation together)
if (shaderStages.any() || firstShader != lastShader)
{ {
imageLayout = vk::ImageLayout::AllGraphicsShadersWrite; imageLayout = lastShader == gl::ShaderType::Fragment
? vk::ImageLayout::AllGraphicsShadersWrite
: vk::ImageLayout::PreFragmentShadersWrite;
} }
else else
{ {
imageLayout = kShaderWriteImageLayouts[shader]; imageLayout = kShaderWriteImageLayouts[firstShader];
} }
VkImageAspectFlags aspectFlags = image->getAspectFlags(); VkImageAspectFlags aspectFlags = image->getAspectFlags();
...@@ -4307,8 +4323,8 @@ angle::Result ContextVk::updateActiveImages(const gl::Context *context, ...@@ -4307,8 +4323,8 @@ angle::Result ContextVk::updateActiveImages(const gl::Context *context,
} }
commandBufferHelper->imageWrite( commandBufferHelper->imageWrite(
&mResourceUseList, gl::LevelIndex(static_cast<uint32_t>(imageUnit.level)), layerStart, this, gl::LevelIndex(static_cast<uint32_t>(imageUnit.level)), layerStart, layerCount,
layerCount, aspectFlags, imageLayout, vk::AliasingMode::Allowed, image); aspectFlags, imageLayout, vk::AliasingMode::Allowed, image);
} }
return angle::Result::Continue; return angle::Result::Continue;
...@@ -5073,7 +5089,7 @@ angle::Result ContextVk::onResourceAccess(const vk::CommandBufferAccess &access) ...@@ -5073,7 +5089,7 @@ angle::Result ContextVk::onResourceAccess(const vk::CommandBufferAccess &access)
{ {
ASSERT(!IsRenderPassStartedAndUsesImage(*mRenderPassCommands, *imageAccess.image)); ASSERT(!IsRenderPassStartedAndUsesImage(*mRenderPassCommands, *imageAccess.image));
imageAccess.image->recordReadBarrier(imageAccess.aspectFlags, imageAccess.imageLayout, imageAccess.image->recordReadBarrier(this, imageAccess.aspectFlags, imageAccess.imageLayout,
&mOutsideRenderPassCommands->getCommandBuffer()); &mOutsideRenderPassCommands->getCommandBuffer());
imageAccess.image->retain(&mResourceUseList); imageAccess.image->retain(&mResourceUseList);
} }
...@@ -5083,7 +5099,7 @@ angle::Result ContextVk::onResourceAccess(const vk::CommandBufferAccess &access) ...@@ -5083,7 +5099,7 @@ angle::Result ContextVk::onResourceAccess(const vk::CommandBufferAccess &access)
ASSERT(!IsRenderPassStartedAndUsesImage(*mRenderPassCommands, *imageWrite.access.image)); ASSERT(!IsRenderPassStartedAndUsesImage(*mRenderPassCommands, *imageWrite.access.image));
imageWrite.access.image->recordWriteBarrier( imageWrite.access.image->recordWriteBarrier(
imageWrite.access.aspectFlags, imageWrite.access.imageLayout, this, imageWrite.access.aspectFlags, imageWrite.access.imageLayout,
&mOutsideRenderPassCommands->getCommandBuffer()); &mOutsideRenderPassCommands->getCommandBuffer());
imageWrite.access.image->retain(&mResourceUseList); imageWrite.access.image->retain(&mResourceUseList);
imageWrite.access.image->onWrite(imageWrite.levelStart, imageWrite.levelCount, imageWrite.access.image->onWrite(imageWrite.levelStart, imageWrite.levelCount,
......
...@@ -447,7 +447,7 @@ class ContextVk : public ContextImpl, public vk::Context, public MultisampleText ...@@ -447,7 +447,7 @@ class ContextVk : public ContextImpl, public vk::Context, public MultisampleText
vk::ImageHelper *image) vk::ImageHelper *image)
{ {
ASSERT(mRenderPassCommands->started()); ASSERT(mRenderPassCommands->started());
mRenderPassCommands->imageRead(&mResourceUseList, aspectFlags, imageLayout, image); mRenderPassCommands->imageRead(this, aspectFlags, imageLayout, image);
} }
void onImageRenderPassWrite(gl::LevelIndex level, void onImageRenderPassWrite(gl::LevelIndex level,
...@@ -458,8 +458,8 @@ class ContextVk : public ContextImpl, public vk::Context, public MultisampleText ...@@ -458,8 +458,8 @@ class ContextVk : public ContextImpl, public vk::Context, public MultisampleText
vk::ImageHelper *image) vk::ImageHelper *image)
{ {
ASSERT(mRenderPassCommands->started()); ASSERT(mRenderPassCommands->started());
mRenderPassCommands->imageWrite(&mResourceUseList, level, layerStart, layerCount, mRenderPassCommands->imageWrite(this, level, layerStart, layerCount, aspectFlags,
aspectFlags, imageLayout, vk::AliasingMode::Allowed, image); imageLayout, vk::AliasingMode::Allowed, image);
} }
void onDepthStencilDraw(gl::LevelIndex level, void onDepthStencilDraw(gl::LevelIndex level,
...@@ -477,7 +477,7 @@ class ContextVk : public ContextImpl, public vk::Context, public MultisampleText ...@@ -477,7 +477,7 @@ class ContextVk : public ContextImpl, public vk::Context, public MultisampleText
{ {
if (mRenderPassCommands->started()) if (mRenderPassCommands->started())
{ {
mRenderPassCommands->onImageHelperRelease(image); mRenderPassCommands->onImageHelperRelease(this, image);
} }
} }
......
...@@ -202,7 +202,7 @@ angle::Result RenderbufferVk::setStorageEGLImageTarget(const gl::Context *contex ...@@ -202,7 +202,7 @@ angle::Result RenderbufferVk::setStorageEGLImageTarget(const gl::Context *contex
{ {
vk::CommandBuffer *commandBuffer; vk::CommandBuffer *commandBuffer;
ANGLE_TRY(contextVk->getOutsideRenderPassCommandBuffer({}, &commandBuffer)); ANGLE_TRY(contextVk->getOutsideRenderPassCommandBuffer({}, &commandBuffer));
mImage->changeLayoutAndQueue(aspect, vk::ImageLayout::ColorAttachment, mImage->changeLayoutAndQueue(contextVk, aspect, vk::ImageLayout::ColorAttachment,
rendererQueueFamilyIndex, commandBuffer); rendererQueueFamilyIndex, commandBuffer);
} }
......
...@@ -625,7 +625,8 @@ RendererVk::RendererVk() ...@@ -625,7 +625,8 @@ RendererVk::RendererVk()
mPipelineCacheDirty(false), mPipelineCacheDirty(false),
mPipelineCacheInitialized(false), mPipelineCacheInitialized(false),
mCommandProcessor(this), mCommandProcessor(this),
mGlslangInitialized(false) mGlslangInitialized(false),
mSupportedVulkanPipelineStageMask(0)
{ {
VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags}; VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
mFormatProperties.fill(invalid); mFormatProperties.fill(invalid);
...@@ -1719,6 +1720,21 @@ angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueF ...@@ -1719,6 +1720,21 @@ angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueF
ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &success)); ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &success));
} }
// Track the set of supported pipeline stages. This is used when issuing image layout
// transitions that cover many stages (such as AllGraphicsReadOnly) to mask out unsupported
// stages, which avoids enumerating every possible combination of stages in the layouts.
VkPipelineStageFlags unsupportedStages = 0;
if (!mPhysicalDeviceFeatures.tessellationShader)
{
unsupportedStages |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT;
}
if (!mPhysicalDeviceFeatures.geometryShader)
{
unsupportedStages |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;
}
mSupportedVulkanPipelineStageMask = ~unsupportedStages;
return angle::Result::Continue; return angle::Result::Continue;
} }
......
...@@ -363,6 +363,11 @@ class RendererVk : angle::NonCopyable ...@@ -363,6 +363,11 @@ class RendererVk : angle::NonCopyable
// Log cache stats for all caches // Log cache stats for all caches
void logCacheStats() const; void logCacheStats() const;
VkPipelineStageFlags getSupportedVulkanPipelineStageMask() const
{
return mSupportedVulkanPipelineStageMask;
}
private: private:
angle::Result initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex); angle::Result initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex);
void ensureCapsInitialized() const; void ensureCapsInitialized() const;
...@@ -492,6 +497,17 @@ class RendererVk : angle::NonCopyable ...@@ -492,6 +497,17 @@ class RendererVk : angle::NonCopyable
// Stats about all Vulkan object caches // Stats about all Vulkan object caches
using VulkanCacheStats = angle::PackedEnumMap<VulkanCacheType, CacheStats>; using VulkanCacheStats = angle::PackedEnumMap<VulkanCacheType, CacheStats>;
VulkanCacheStats mVulkanCacheStats; VulkanCacheStats mVulkanCacheStats;
// A mask to filter out Vulkan pipeline stages that are not supported, applied in situations
// where multiple stages are prespecified (for example with image layout transitions):
//
// - Excludes GEOMETRY if geometry shaders are not supported.
// - Excludes TESSELLATION_CONTROL and TESSELLATION_EVALUATION if tessellation shaders are not
// supported.
//
// Note that this mask can have bits set that don't correspond to valid stages, so it's strictly
// only useful for masking out unsupported stages in an otherwise valid set of stages.
VkPipelineStageFlags mSupportedVulkanPipelineStageMask;
}; };
} // namespace rx } // namespace rx
......
...@@ -1336,7 +1336,7 @@ angle::Result WindowSurfaceVk::present(ContextVk *contextVk, ...@@ -1336,7 +1336,7 @@ angle::Result WindowSurfaceVk::present(ContextVk *contextVk,
} }
// This does nothing if it's already in the requested layout // This does nothing if it's already in the requested layout
image.image.recordReadBarrier(VK_IMAGE_ASPECT_COLOR_BIT, vk::ImageLayout::Present, image.image.recordReadBarrier(contextVk, VK_IMAGE_ASPECT_COLOR_BIT, vk::ImageLayout::Present,
commandBuffer); commandBuffer);
// Knowing that the kSwapHistorySize'th submission ago has finished, we can know that the // Knowing that the kSwapHistorySize'th submission ago has finished, we can know that the
......
...@@ -1366,8 +1366,8 @@ angle::Result TextureVk::setEGLImageTarget(const gl::Context *context, ...@@ -1366,8 +1366,8 @@ angle::Result TextureVk::setEGLImageTarget(const gl::Context *context,
vk::CommandBuffer *commandBuffer; vk::CommandBuffer *commandBuffer;
ANGLE_TRY(contextVk->getOutsideRenderPassCommandBuffer({}, &commandBuffer)); ANGLE_TRY(contextVk->getOutsideRenderPassCommandBuffer({}, &commandBuffer));
mImage->changeLayoutAndQueue(mImage->getAspectFlags(), newLayout, rendererQueueFamilyIndex, mImage->changeLayoutAndQueue(contextVk, mImage->getAspectFlags(), newLayout,
commandBuffer); rendererQueueFamilyIndex, commandBuffer);
} }
return angle::Result::Continue; return angle::Result::Continue;
......
...@@ -101,8 +101,12 @@ struct ImageMemoryBarrierData ...@@ -101,8 +101,12 @@ struct ImageMemoryBarrierData
PipelineStage barrierIndex; PipelineStage barrierIndex;
}; };
constexpr VkPipelineStageFlags kPreFragmentStageFlags =
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;
constexpr VkPipelineStageFlags kAllShadersPipelineStageFlags = constexpr VkPipelineStageFlags kAllShadersPipelineStageFlags =
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | kPreFragmentStageFlags | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
constexpr VkPipelineStageFlags kAllDepthStencilPipelineStageFlags = constexpr VkPipelineStageFlags kAllDepthStencilPipelineStageFlags =
...@@ -314,33 +318,35 @@ constexpr angle::PackedEnumMap<ImageLayout, ImageMemoryBarrierData> kImageMemory ...@@ -314,33 +318,35 @@ constexpr angle::PackedEnumMap<ImageLayout, ImageMemoryBarrierData> kImageMemory
}, },
}, },
{ {
ImageLayout::GeometryShaderReadOnly, ImageLayout::PreFragmentShadersReadOnly,
ImageMemoryBarrierData{ ImageMemoryBarrierData{
"GeometryShaderReadOnly", "PreFragmentShadersReadOnly",
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, kPreFragmentStageFlags,
VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, kPreFragmentStageFlags,
// Transition to: all reads must happen after barrier. // Transition to: all reads must happen after barrier.
VK_ACCESS_SHADER_READ_BIT, VK_ACCESS_SHADER_READ_BIT,
// Transition from: RAR and WAR don't need memory barrier. // Transition from: RAR and WAR don't need memory barrier.
0, 0,
ResourceAccess::ReadOnly, ResourceAccess::ReadOnly,
PipelineStage::GeometryShader, // In case of multiple destination stages, we barrier the earliest stage
PipelineStage::VertexShader,
}, },
}, },
{ {
ImageLayout::GeometryShaderWrite, ImageLayout::PreFragmentShadersWrite,
ImageMemoryBarrierData{ ImageMemoryBarrierData{
"GeometryShaderWrite", "PreFragmentShadersWrite",
VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, kPreFragmentStageFlags,
VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, kPreFragmentStageFlags,
// Transition to: all reads and writes must happen after barrier. // Transition to: all reads and writes must happen after barrier.
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
// Transition from: all writes must finish before barrier. // Transition from: all writes must finish before barrier.
VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
ResourceAccess::Write, ResourceAccess::Write,
PipelineStage::GeometryShader, // In case of multiple destination stages, We barrier the earliest stage
PipelineStage::VertexShader,
}, },
}, },
{ {
...@@ -438,6 +444,18 @@ constexpr angle::PackedEnumMap<ImageLayout, ImageMemoryBarrierData> kImageMemory ...@@ -438,6 +444,18 @@ constexpr angle::PackedEnumMap<ImageLayout, ImageMemoryBarrierData> kImageMemory
}; };
// clang-format on // clang-format on
VkPipelineStageFlags GetImageLayoutSrcStageMask(Context *context,
const ImageMemoryBarrierData &transition)
{
return transition.srcStageMask & context->getRenderer()->getSupportedVulkanPipelineStageMask();
}
VkPipelineStageFlags GetImageLayoutDstStageMask(Context *context,
const ImageMemoryBarrierData &transition)
{
return transition.dstStageMask & context->getRenderer()->getSupportedVulkanPipelineStageMask();
}
VkImageCreateFlags GetImageCreateFlags(gl::TextureType textureType) VkImageCreateFlags GetImageCreateFlags(gl::TextureType textureType)
{ {
switch (textureType) switch (textureType)
...@@ -846,19 +864,19 @@ void CommandBufferHelper::bufferWrite(ResourceUseList *resourceUseList, ...@@ -846,19 +864,19 @@ void CommandBufferHelper::bufferWrite(ResourceUseList *resourceUseList,
} }
} }
void CommandBufferHelper::imageRead(ResourceUseList *resourceUseList, void CommandBufferHelper::imageRead(ContextVk *contextVk,
VkImageAspectFlags aspectFlags, VkImageAspectFlags aspectFlags,
ImageLayout imageLayout, ImageLayout imageLayout,
ImageHelper *image) ImageHelper *image)
{ {
image->retain(resourceUseList); image->retain(&contextVk->getResourceUseList());
if (image->isReadBarrierNecessary(imageLayout)) if (image->isReadBarrierNecessary(imageLayout))
{ {
PipelineStage barrierIndex = kImageMemoryBarrierData[imageLayout].barrierIndex; PipelineStage barrierIndex = kImageMemoryBarrierData[imageLayout].barrierIndex;
ASSERT(barrierIndex != PipelineStage::InvalidEnum); ASSERT(barrierIndex != PipelineStage::InvalidEnum);
PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex]; PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex];
if (image->updateLayoutAndBarrier(aspectFlags, imageLayout, barrier)) if (image->updateLayoutAndBarrier(contextVk, aspectFlags, imageLayout, barrier))
{ {
mPipelineBarrierMask.set(barrierIndex); mPipelineBarrierMask.set(barrierIndex);
} }
...@@ -875,7 +893,7 @@ void CommandBufferHelper::imageRead(ResourceUseList *resourceUseList, ...@@ -875,7 +893,7 @@ void CommandBufferHelper::imageRead(ResourceUseList *resourceUseList,
} }
} }
void CommandBufferHelper::imageWrite(ResourceUseList *resourceUseList, void CommandBufferHelper::imageWrite(ContextVk *contextVk,
gl::LevelIndex level, gl::LevelIndex level,
uint32_t layerStart, uint32_t layerStart,
uint32_t layerCount, uint32_t layerCount,
...@@ -884,13 +902,13 @@ void CommandBufferHelper::imageWrite(ResourceUseList *resourceUseList, ...@@ -884,13 +902,13 @@ void CommandBufferHelper::imageWrite(ResourceUseList *resourceUseList,
AliasingMode aliasingMode, AliasingMode aliasingMode,
ImageHelper *image) ImageHelper *image)
{ {
image->retain(resourceUseList); image->retain(&contextVk->getResourceUseList());
image->onWrite(level, 1, layerStart, layerCount, aspectFlags); image->onWrite(level, 1, layerStart, layerCount, aspectFlags);
// Write always requires a barrier // Write always requires a barrier
PipelineStage barrierIndex = kImageMemoryBarrierData[imageLayout].barrierIndex; PipelineStage barrierIndex = kImageMemoryBarrierData[imageLayout].barrierIndex;
ASSERT(barrierIndex != PipelineStage::InvalidEnum); ASSERT(barrierIndex != PipelineStage::InvalidEnum);
PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex]; PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex];
if (image->updateLayoutAndBarrier(aspectFlags, imageLayout, barrier)) if (image->updateLayoutAndBarrier(contextVk, aspectFlags, imageLayout, barrier))
{ {
mPipelineBarrierMask.set(barrierIndex); mPipelineBarrierMask.set(barrierIndex);
} }
...@@ -1065,7 +1083,7 @@ void CommandBufferHelper::executeBarriers(const angle::FeaturesVk &features, ...@@ -1065,7 +1083,7 @@ void CommandBufferHelper::executeBarriers(const angle::FeaturesVk &features,
mPipelineBarrierMask.reset(); mPipelineBarrierMask.reset();
} }
void CommandBufferHelper::finalizeDepthStencilImageLayout() void CommandBufferHelper::finalizeDepthStencilImageLayout(Context *context)
{ {
ASSERT(mIsRenderPassCommandBuffer); ASSERT(mIsRenderPassCommandBuffer);
ASSERT(mDepthStencilImage); ASSERT(mDepthStencilImage);
...@@ -1098,7 +1116,7 @@ void CommandBufferHelper::finalizeDepthStencilImageLayout() ...@@ -1098,7 +1116,7 @@ void CommandBufferHelper::finalizeDepthStencilImageLayout()
ASSERT(barrierIndex != PipelineStage::InvalidEnum); ASSERT(barrierIndex != PipelineStage::InvalidEnum);
PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex]; PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex];
if (mDepthStencilImage->updateLayoutAndBarrier(aspectFlags, imageLayout, barrier)) if (mDepthStencilImage->updateLayoutAndBarrier(context, aspectFlags, imageLayout, barrier))
{ {
mPipelineBarrierMask.set(barrierIndex); mPipelineBarrierMask.set(barrierIndex);
} }
...@@ -1127,7 +1145,7 @@ void CommandBufferHelper::finalizeDepthStencilImageLayout() ...@@ -1127,7 +1145,7 @@ void CommandBufferHelper::finalizeDepthStencilImageLayout()
} }
} }
void CommandBufferHelper::finalizeDepthStencilResolveImageLayout() void CommandBufferHelper::finalizeDepthStencilResolveImageLayout(Context *context)
{ {
ASSERT(mIsRenderPassCommandBuffer); ASSERT(mIsRenderPassCommandBuffer);
ASSERT(mDepthStencilImage); ASSERT(mDepthStencilImage);
...@@ -1142,7 +1160,8 @@ void CommandBufferHelper::finalizeDepthStencilResolveImageLayout() ...@@ -1142,7 +1160,8 @@ void CommandBufferHelper::finalizeDepthStencilResolveImageLayout()
ASSERT(barrierIndex != PipelineStage::InvalidEnum); ASSERT(barrierIndex != PipelineStage::InvalidEnum);
PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex]; PipelineBarrier *barrier = &mPipelineBarriers[barrierIndex];
if (mDepthStencilResolveImage->updateLayoutAndBarrier(aspectFlags, imageLayout, barrier)) if (mDepthStencilResolveImage->updateLayoutAndBarrier(context, aspectFlags, imageLayout,
barrier))
{ {
mPipelineBarrierMask.set(barrierIndex); mPipelineBarrierMask.set(barrierIndex);
} }
...@@ -1170,19 +1189,19 @@ void CommandBufferHelper::finalizeDepthStencilResolveImageLayout() ...@@ -1170,19 +1189,19 @@ void CommandBufferHelper::finalizeDepthStencilResolveImageLayout()
} }
} }
void CommandBufferHelper::onImageHelperRelease(const ImageHelper *image) void CommandBufferHelper::onImageHelperRelease(Context *context, const ImageHelper *image)
{ {
ASSERT(mIsRenderPassCommandBuffer); ASSERT(mIsRenderPassCommandBuffer);
if (mDepthStencilImage == image) if (mDepthStencilImage == image)
{ {
finalizeDepthStencilImageLayout(); finalizeDepthStencilImageLayout(context);
mDepthStencilImage = nullptr; mDepthStencilImage = nullptr;
} }
if (mDepthStencilResolveImage == image) if (mDepthStencilResolveImage == image)
{ {
finalizeDepthStencilResolveImageLayout(); finalizeDepthStencilResolveImageLayout(context);
mDepthStencilResolveImage = nullptr; mDepthStencilResolveImage = nullptr;
} }
} }
...@@ -1286,11 +1305,11 @@ void CommandBufferHelper::endRenderPass(ContextVk *contextVk) ...@@ -1286,11 +1305,11 @@ void CommandBufferHelper::endRenderPass(ContextVk *contextVk)
// Do depth stencil layout change. // Do depth stencil layout change.
if (mDepthStencilImage) if (mDepthStencilImage)
{ {
finalizeDepthStencilImageLayout(); finalizeDepthStencilImageLayout(contextVk);
} }
if (mDepthStencilResolveImage) if (mDepthStencilResolveImage)
{ {
finalizeDepthStencilResolveImageLayout(); finalizeDepthStencilResolveImageLayout(contextVk);
} }
} }
...@@ -3703,7 +3722,7 @@ angle::Result ImageHelper::initializeNonZeroMemory(Context *context, VkDeviceSiz ...@@ -3703,7 +3722,7 @@ angle::Result ImageHelper::initializeNonZeroMemory(Context *context, VkDeviceSiz
ANGLE_TRY(renderer->getCommandBufferOneOff(context, &commandBuffer)); ANGLE_TRY(renderer->getCommandBufferOneOff(context, &commandBuffer));
// Queue a DMA copy. // Queue a DMA copy.
barrierImpl(getAspectFlags(), ImageLayout::TransferDst, mCurrentQueueFamilyIndex, barrierImpl(context, getAspectFlags(), ImageLayout::TransferDst, mCurrentQueueFamilyIndex,
&commandBuffer); &commandBuffer);
StagingBuffer stagingBuffer; StagingBuffer stagingBuffer;
...@@ -4169,13 +4188,14 @@ bool ImageHelper::isReadBarrierNecessary(ImageLayout newLayout) const ...@@ -4169,13 +4188,14 @@ bool ImageHelper::isReadBarrierNecessary(ImageLayout newLayout) const
return layoutData.type == ResourceAccess::Write; return layoutData.type == ResourceAccess::Write;
} }
void ImageHelper::changeLayoutAndQueue(VkImageAspectFlags aspectMask, void ImageHelper::changeLayoutAndQueue(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
uint32_t newQueueFamilyIndex, uint32_t newQueueFamilyIndex,
CommandBuffer *commandBuffer) CommandBuffer *commandBuffer)
{ {
ASSERT(isQueueChangeNeccesary(newQueueFamilyIndex)); ASSERT(isQueueChangeNeccesary(newQueueFamilyIndex));
barrierImpl(aspectMask, newLayout, newQueueFamilyIndex, commandBuffer); barrierImpl(context, aspectMask, newLayout, newQueueFamilyIndex, commandBuffer);
} }
void ImageHelper::acquireFromExternal(ContextVk *contextVk, void ImageHelper::acquireFromExternal(ContextVk *contextVk,
...@@ -4193,7 +4213,8 @@ void ImageHelper::acquireFromExternal(ContextVk *contextVk, ...@@ -4193,7 +4213,8 @@ void ImageHelper::acquireFromExternal(ContextVk *contextVk,
mCurrentLayout = currentLayout; mCurrentLayout = currentLayout;
mCurrentQueueFamilyIndex = externalQueueFamilyIndex; mCurrentQueueFamilyIndex = externalQueueFamilyIndex;
changeLayoutAndQueue(getAspectFlags(), mCurrentLayout, rendererQueueFamilyIndex, commandBuffer); changeLayoutAndQueue(contextVk, getAspectFlags(), mCurrentLayout, rendererQueueFamilyIndex,
commandBuffer);
// It is unknown how the external has modified the image, so assume every subresource has // It is unknown how the external has modified the image, so assume every subresource has
// defined content. That is unless the layout is Undefined. // defined content. That is unless the layout is Undefined.
...@@ -4215,7 +4236,8 @@ void ImageHelper::releaseToExternal(ContextVk *contextVk, ...@@ -4215,7 +4236,8 @@ void ImageHelper::releaseToExternal(ContextVk *contextVk,
{ {
ASSERT(mCurrentQueueFamilyIndex == rendererQueueFamilyIndex); ASSERT(mCurrentQueueFamilyIndex == rendererQueueFamilyIndex);
changeLayoutAndQueue(getAspectFlags(), desiredLayout, externalQueueFamilyIndex, commandBuffer); changeLayoutAndQueue(contextVk, getAspectFlags(), desiredLayout, externalQueueFamilyIndex,
commandBuffer);
} }
bool ImageHelper::isReleasedToExternal() const bool ImageHelper::isReleasedToExternal() const
...@@ -4272,7 +4294,8 @@ ANGLE_INLINE void ImageHelper::initImageMemoryBarrierStruct( ...@@ -4272,7 +4294,8 @@ ANGLE_INLINE void ImageHelper::initImageMemoryBarrierStruct(
// Generalized to accept both "primary" and "secondary" command buffers. // Generalized to accept both "primary" and "secondary" command buffers.
template <typename CommandBufferT> template <typename CommandBufferT>
void ImageHelper::barrierImpl(VkImageAspectFlags aspectMask, void ImageHelper::barrierImpl(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
uint32_t newQueueFamilyIndex, uint32_t newQueueFamilyIndex,
CommandBufferT *commandBuffer) CommandBufferT *commandBuffer)
...@@ -4284,26 +4307,29 @@ void ImageHelper::barrierImpl(VkImageAspectFlags aspectMask, ...@@ -4284,26 +4307,29 @@ void ImageHelper::barrierImpl(VkImageAspectFlags aspectMask,
initImageMemoryBarrierStruct(aspectMask, newLayout, newQueueFamilyIndex, &imageMemoryBarrier); initImageMemoryBarrierStruct(aspectMask, newLayout, newQueueFamilyIndex, &imageMemoryBarrier);
// There might be other shaderRead operations there other than the current layout. // There might be other shaderRead operations there other than the current layout.
VkPipelineStageFlags srcStageMask = transitionFrom.srcStageMask; VkPipelineStageFlags srcStageMask = GetImageLayoutSrcStageMask(context, transitionFrom);
if (mCurrentShaderReadStageMask) if (mCurrentShaderReadStageMask)
{ {
srcStageMask |= mCurrentShaderReadStageMask; srcStageMask |= mCurrentShaderReadStageMask;
mCurrentShaderReadStageMask = 0; mCurrentShaderReadStageMask = 0;
mLastNonShaderReadOnlyLayout = ImageLayout::Undefined; mLastNonShaderReadOnlyLayout = ImageLayout::Undefined;
} }
commandBuffer->imageBarrier(srcStageMask, transitionTo.dstStageMask, imageMemoryBarrier); commandBuffer->imageBarrier(srcStageMask, GetImageLayoutDstStageMask(context, transitionTo),
imageMemoryBarrier);
mCurrentLayout = newLayout; mCurrentLayout = newLayout;
mCurrentQueueFamilyIndex = newQueueFamilyIndex; mCurrentQueueFamilyIndex = newQueueFamilyIndex;
} }
template void ImageHelper::barrierImpl<priv::SecondaryCommandBuffer>( template void ImageHelper::barrierImpl<priv::SecondaryCommandBuffer>(
Context *context,
VkImageAspectFlags aspectMask, VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
uint32_t newQueueFamilyIndex, uint32_t newQueueFamilyIndex,
priv::SecondaryCommandBuffer *commandBuffer); priv::SecondaryCommandBuffer *commandBuffer);
bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask, bool ImageHelper::updateLayoutAndBarrier(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
PipelineBarrier *barrier) PipelineBarrier *barrier)
{ {
...@@ -4315,7 +4341,8 @@ bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask, ...@@ -4315,7 +4341,8 @@ bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask,
// changed. The following asserts that such a barrier is not attempted. // changed. The following asserts that such a barrier is not attempted.
ASSERT(layoutData.type == ResourceAccess::Write); ASSERT(layoutData.type == ResourceAccess::Write);
// No layout change, only memory barrier is required // No layout change, only memory barrier is required
barrier->mergeMemoryBarrier(layoutData.srcStageMask, layoutData.dstStageMask, barrier->mergeMemoryBarrier(GetImageLayoutSrcStageMask(context, layoutData),
GetImageLayoutDstStageMask(context, layoutData),
layoutData.srcAccessMask, layoutData.dstAccessMask); layoutData.srcAccessMask, layoutData.dstAccessMask);
barrierModified = true; barrierModified = true;
} }
...@@ -4323,8 +4350,8 @@ bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask, ...@@ -4323,8 +4350,8 @@ bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask,
{ {
const ImageMemoryBarrierData &transitionFrom = kImageMemoryBarrierData[mCurrentLayout]; const ImageMemoryBarrierData &transitionFrom = kImageMemoryBarrierData[mCurrentLayout];
const ImageMemoryBarrierData &transitionTo = kImageMemoryBarrierData[newLayout]; const ImageMemoryBarrierData &transitionTo = kImageMemoryBarrierData[newLayout];
VkPipelineStageFlags srcStageMask = transitionFrom.srcStageMask; VkPipelineStageFlags srcStageMask = GetImageLayoutSrcStageMask(context, transitionFrom);
VkPipelineStageFlags dstStageMask = transitionTo.dstStageMask; VkPipelineStageFlags dstStageMask = GetImageLayoutDstStageMask(context, transitionTo);
if (IsShaderReadOnlyLayout(transitionTo) && IsShaderReadOnlyLayout(transitionFrom)) if (IsShaderReadOnlyLayout(transitionTo) && IsShaderReadOnlyLayout(transitionFrom))
{ {
...@@ -4337,8 +4364,9 @@ bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask, ...@@ -4337,8 +4364,9 @@ bool ImageHelper::updateLayoutAndBarrier(VkImageAspectFlags aspectMask,
{ {
const ImageMemoryBarrierData &layoutData = const ImageMemoryBarrierData &layoutData =
kImageMemoryBarrierData[mLastNonShaderReadOnlyLayout]; kImageMemoryBarrierData[mLastNonShaderReadOnlyLayout];
barrier->mergeMemoryBarrier(layoutData.srcStageMask, dstStageMask, barrier->mergeMemoryBarrier(GetImageLayoutSrcStageMask(context, layoutData),
layoutData.srcAccessMask, transitionTo.dstAccessMask); dstStageMask, layoutData.srcAccessMask,
transitionTo.dstAccessMask);
barrierModified = true; barrierModified = true;
// Accumulate new read stage. // Accumulate new read stage.
mCurrentShaderReadStageMask |= dstStageMask; mCurrentShaderReadStageMask |= dstStageMask;
...@@ -5663,7 +5691,7 @@ angle::Result ImageHelper::flushStagedUpdates(ContextVk *contextVk, ...@@ -5663,7 +5691,7 @@ angle::Result ImageHelper::flushStagedUpdates(ContextVk *contextVk,
if (updateLayerCount >= kMaxParallelSubresourceUpload) if (updateLayerCount >= kMaxParallelSubresourceUpload)
{ {
// If there are more subresources than bits we can track, always insert a barrier. // If there are more subresources than bits we can track, always insert a barrier.
recordWriteBarrier(aspectFlags, ImageLayout::TransferDst, commandBuffer); recordWriteBarrier(contextVk, aspectFlags, ImageLayout::TransferDst, commandBuffer);
subresourceUploadsInProgress = std::numeric_limits<uint64_t>::max(); subresourceUploadsInProgress = std::numeric_limits<uint64_t>::max();
} }
else else
...@@ -5677,7 +5705,8 @@ angle::Result ImageHelper::flushStagedUpdates(ContextVk *contextVk, ...@@ -5677,7 +5705,8 @@ angle::Result ImageHelper::flushStagedUpdates(ContextVk *contextVk,
if ((subresourceUploadsInProgress & subresourceHash) != 0) if ((subresourceUploadsInProgress & subresourceHash) != 0)
{ {
// If there's overlap in subresource upload, issue a barrier. // If there's overlap in subresource upload, issue a barrier.
recordWriteBarrier(aspectFlags, ImageLayout::TransferDst, commandBuffer); recordWriteBarrier(contextVk, aspectFlags, ImageLayout::TransferDst,
commandBuffer);
subresourceUploadsInProgress = 0; subresourceUploadsInProgress = 0;
} }
subresourceUploadsInProgress |= subresourceHash; subresourceUploadsInProgress |= subresourceHash;
......
...@@ -995,11 +995,11 @@ class CommandBufferHelper : angle::NonCopyable ...@@ -995,11 +995,11 @@ class CommandBufferHelper : angle::NonCopyable
AliasingMode aliasingMode, AliasingMode aliasingMode,
BufferHelper *buffer); BufferHelper *buffer);
void imageRead(ResourceUseList *resourceUseList, void imageRead(ContextVk *contextVk,
VkImageAspectFlags aspectFlags, VkImageAspectFlags aspectFlags,
ImageLayout imageLayout, ImageLayout imageLayout,
ImageHelper *image); ImageHelper *image);
void imageWrite(ResourceUseList *resourceUseList, void imageWrite(ContextVk *contextVk,
gl::LevelIndex level, gl::LevelIndex level,
uint32_t layerStart, uint32_t layerStart,
uint32_t layerCount, uint32_t layerCount,
...@@ -1053,7 +1053,7 @@ class CommandBufferHelper : angle::NonCopyable ...@@ -1053,7 +1053,7 @@ class CommandBufferHelper : angle::NonCopyable
return mRenderPassStarted; return mRenderPassStarted;
} }
void onImageHelperRelease(const ImageHelper *image); void onImageHelperRelease(Context *context, const ImageHelper *image);
void beginRenderPass(const Framebuffer &framebuffer, void beginRenderPass(const Framebuffer &framebuffer,
const gl::Rectangle &renderArea, const gl::Rectangle &renderArea,
...@@ -1176,8 +1176,8 @@ class CommandBufferHelper : angle::NonCopyable ...@@ -1176,8 +1176,8 @@ class CommandBufferHelper : angle::NonCopyable
void restoreDepthContent(); void restoreDepthContent();
void restoreStencilContent(); void restoreStencilContent();
void finalizeDepthStencilImageLayout(); void finalizeDepthStencilImageLayout(Context *context);
void finalizeDepthStencilResolveImageLayout(); void finalizeDepthStencilResolveImageLayout(Context *context);
// Allocator used by this class. Using a pool allocator per CBH to avoid threading issues // Allocator used by this class. Using a pool allocator per CBH to avoid threading issues
// that occur w/ shared allocator between multiple CBHs. // that occur w/ shared allocator between multiple CBHs.
...@@ -1286,8 +1286,9 @@ enum class ImageLayout ...@@ -1286,8 +1286,9 @@ enum class ImageLayout
TransferDst, TransferDst,
VertexShaderReadOnly, VertexShaderReadOnly,
VertexShaderWrite, VertexShaderWrite,
GeometryShaderReadOnly, // PreFragment == Vertex, Tessellation and Geometry stages
GeometryShaderWrite, PreFragmentShadersReadOnly,
PreFragmentShadersWrite,
FragmentShaderReadOnly, FragmentShaderReadOnly,
FragmentShaderWrite, FragmentShaderWrite,
ComputeShaderReadOnly, ComputeShaderReadOnly,
...@@ -1621,17 +1622,19 @@ class ImageHelper final : public Resource, public angle::Subject ...@@ -1621,17 +1622,19 @@ class ImageHelper final : public Resource, public angle::Subject
uint32_t layerCount) const; uint32_t layerCount) const;
bool hasStagedUpdatesInAllocatedLevels() const; bool hasStagedUpdatesInAllocatedLevels() const;
void recordWriteBarrier(VkImageAspectFlags aspectMask, void recordWriteBarrier(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
CommandBuffer *commandBuffer) CommandBuffer *commandBuffer)
{ {
barrierImpl(aspectMask, newLayout, mCurrentQueueFamilyIndex, commandBuffer); barrierImpl(context, aspectMask, newLayout, mCurrentQueueFamilyIndex, commandBuffer);
} }
// This function can be used to prevent issuing redundant layout transition commands. // This function can be used to prevent issuing redundant layout transition commands.
bool isReadBarrierNecessary(ImageLayout newLayout) const; bool isReadBarrierNecessary(ImageLayout newLayout) const;
void recordReadBarrier(VkImageAspectFlags aspectMask, void recordReadBarrier(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
CommandBuffer *commandBuffer) CommandBuffer *commandBuffer)
{ {
...@@ -1640,7 +1643,7 @@ class ImageHelper final : public Resource, public angle::Subject ...@@ -1640,7 +1643,7 @@ class ImageHelper final : public Resource, public angle::Subject
return; return;
} }
barrierImpl(aspectMask, newLayout, mCurrentQueueFamilyIndex, commandBuffer); barrierImpl(context, aspectMask, newLayout, mCurrentQueueFamilyIndex, commandBuffer);
} }
bool isQueueChangeNeccesary(uint32_t newQueueFamilyIndex) const bool isQueueChangeNeccesary(uint32_t newQueueFamilyIndex) const
...@@ -1648,13 +1651,15 @@ class ImageHelper final : public Resource, public angle::Subject ...@@ -1648,13 +1651,15 @@ class ImageHelper final : public Resource, public angle::Subject
return mCurrentQueueFamilyIndex != newQueueFamilyIndex; return mCurrentQueueFamilyIndex != newQueueFamilyIndex;
} }
void changeLayoutAndQueue(VkImageAspectFlags aspectMask, void changeLayoutAndQueue(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
uint32_t newQueueFamilyIndex, uint32_t newQueueFamilyIndex,
CommandBuffer *commandBuffer); CommandBuffer *commandBuffer);
// Returns true if barrier has been generated // Returns true if barrier has been generated
bool updateLayoutAndBarrier(VkImageAspectFlags aspectMask, bool updateLayoutAndBarrier(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
PipelineBarrier *barrier); PipelineBarrier *barrier);
...@@ -1834,7 +1839,8 @@ class ImageHelper final : public Resource, public angle::Subject ...@@ -1834,7 +1839,8 @@ class ImageHelper final : public Resource, public angle::Subject
// Generalized to accept both "primary" and "secondary" command buffers. // Generalized to accept both "primary" and "secondary" command buffers.
template <typename CommandBufferT> template <typename CommandBufferT>
void barrierImpl(VkImageAspectFlags aspectMask, void barrierImpl(Context *context,
VkImageAspectFlags aspectMask,
ImageLayout newLayout, ImageLayout newLayout,
uint32_t newQueueFamilyIndex, uint32_t newQueueFamilyIndex,
CommandBufferT *commandBuffer); CommandBufferT *commandBuffer);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment