void SetCommandBuffer(vk::CommandBuffer* pCommandBuffer);
void AccessBuffer(const plGALBufferVulkan* pBuffer, vk::DeviceSize offset, vk::DeviceSize length,
  vk::PipelineStageFlags srcStages, vk::AccessFlags srcAccess,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess);
bool IsDirty(vk::Buffer buffer, vk::DeviceSize offset, vk::DeviceSize length, vk::AccessFlags dstAccess);
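// Usage sketch (hypothetical call site, not part of this header; the src/dst parameter
// meaning is inferred from the names): declare how a buffer range was last accessed and
// how it is about to be accessed, so the tracker can record a matching buffer barrier:
//
//   barrier.AccessBuffer(pVertexBuffer, 0, 256,
//     vk::PipelineStageFlagBits::eTransfer, vk::AccessFlagBits::eTransferWrite,
//     vk::PipelineStageFlagBits::eVertexInput, vk::AccessFlagBits::eVertexAttributeRead);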
void SetInitialImageState(const plGALTextureVulkan* pTexture, vk::ImageLayout dstLayout,
  vk::PipelineStageFlags dstStages = vk::PipelineStageFlagBits::eTopOfPipe, vk::AccessFlags dstAccess = {});
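// Usage sketch (assumed semantics, not confirmed by this header): seed the tracked layout
// of a texture without emitting a barrier, e.g. right after creation or after a transition
// that happened outside the tracker:
//
//   barrier.SetInitialImageState(pTexture, vk::ImageLayout::eUndefined);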
void EnsureImageLayout(const plGALTextureVulkan* pTexture, vk::ImageLayout dstLayout,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess, bool bDiscardSource = false);
void EnsureImageLayout(const plGALRenderTargetViewVulkan* pTextureView, vk::ImageLayout dstLayout,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess, bool bDiscardSource = false);
void EnsureImageLayout(const plGALTextureResourceViewVulkan* pTextureView, vk::ImageLayout dstLayout,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess, bool bDiscardSource = false);
void EnsureImageLayout(const plGALTextureUnorderedAccessViewVulkan* pTextureView, vk::ImageLayout dstLayout,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess, bool bDiscardSource = false);
void EnsureImageLayout(const plGALTextureVulkan* pTexture, vk::ImageSubresourceRange subResources, vk::ImageLayout dstLayout,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess, bool bDiscardSource = false);

bool IsDirty(vk::Image image, const vk::ImageSubresourceRange& subResources) const;
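// Usage sketch (hypothetical call site): request that a texture be in a given layout for an
// upcoming access; if the tracked layout, stage, or access differs, an image barrier is
// queued. bDiscardSource presumably allows transitioning from eUndefined, dropping the old
// contents (assumption based on the parameter name):
//
//   barrier.EnsureImageLayout(pTexture, vk::ImageLayout::eShaderReadOnlyOptimal,
//     vk::PipelineStageFlagBits::eFragmentShader, vk::AccessFlagBits::eShaderRead);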
struct SubElementState
{
  vk::PipelineStageFlags m_stages;
  vk::AccessFlags m_accessMask;
  vk::ImageLayout m_layout;

  bool operator==(const SubElementState& rhs) const
  {
    return m_stages == rhs.m_stages && m_accessMask == rhs.m_accessMask && m_layout == rhs.m_layout;
  }
};
vk::DeviceSize m_offset;
vk::DeviceSize m_length;
vk::PipelineStageFlags m_stages;
vk::AccessFlags m_accessMask;
bool AddBufferBarrierInternal(vk::Buffer buffer, vk::DeviceSize offset, vk::DeviceSize length,
  vk::PipelineStageFlags srcStages, vk::AccessFlags srcAccess,
  vk::PipelineStageFlags dstStages, vk::AccessFlags dstAccess);
bool IsDirtyInternal(const BufferState& state, const SubBufferState& subState) const;
bool AddImageBarrierInternal(vk::Image image, const vk::ImageSubresourceRange& subResources,
  vk::ImageLayout srcLayout, vk::AccessFlags srcAccess,
  vk::ImageLayout dstLayout, vk::AccessFlags dstAccess, bool bDiscardSource);
bool IsDirtyInternal(const ImageState& state, const vk::ImageSubresourceRange& subResources) const;
static constexpr vk::AccessFlags s_readAccess = vk::AccessFlagBits::eIndirectCommandRead | vk::AccessFlagBits::eIndexRead |
  vk::AccessFlagBits::eVertexAttributeRead | vk::AccessFlagBits::eUniformRead |
  vk::AccessFlagBits::eInputAttachmentRead | vk::AccessFlagBits::eShaderRead |
  vk::AccessFlagBits::eColorAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentRead |
  vk::AccessFlagBits::eTransferRead | vk::AccessFlagBits::eHostRead |
  vk::AccessFlagBits::eMemoryRead;
static constexpr vk::AccessFlags s_writeAccess = vk::AccessFlagBits::eShaderWrite | vk::AccessFlagBits::eColorAttachmentWrite |
  vk::AccessFlagBits::eDepthStencilAttachmentWrite | vk::AccessFlagBits::eTransferWrite |
  vk::AccessFlagBits::eHostWrite | vk::AccessFlagBits::eMemoryWrite;
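// These masks classify a vk::AccessFlags value into read and write accesses, presumably so
// the dirty checks can decide whether a new access conflicts with the tracked one. A sketch
// of the assumed usage (not shown in this header):
//
//   const bool bWrites = (dstAccess & s_writeAccess) != vk::AccessFlags{};
//   const bool bReads  = (dstAccess & s_readAccess)  != vk::AccessFlags{};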
vk::CommandBuffer* m_pCommandBuffer = nullptr;
vk::PipelineStageFlags m_srcStageMask;
vk::PipelineStageFlags m_dstStageMask;

vk::AccessFlags m_srcAccess;
vk::AccessFlags m_dstAccess;