JBR-8740 Vulkan: Optimize BLIT (Part 2)

Buffer access management refactoring
This commit is contained in:
Alexey Ushakov
2025-07-11 14:45:12 +02:00
parent 666dc515d7
commit 8c3f3f66bb
4 changed files with 25 additions and 37 deletions

View File

@@ -225,8 +225,6 @@ VKBuffer* VKBuffer_Create(VKDevice* device, VkDeviceSize size,
VKBuffer_Destroy(device, buffer);
return NULL;
}
buffer->lastStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
buffer->lastAccess = 0;
return buffer;
}
@@ -275,8 +273,8 @@ VKBuffer *VKBuffer_CreateFromRaster(VKDevice *device,
VkCommandBuffer cb = VKRenderer_Record(device->renderer);
VkBufferMemoryBarrier barrier;
VKBarrierBatch barrierBatch = {};
VKRenderer_AddBufferBarrier(&barrier, &barrierBatch, buffer,
stage, access);
VKBuffer_AddBarrier(&barrier, &barrierBatch, buffer,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, stage, access);
if (barrierBatch.barrierCount > 0) {
device->vkCmdPipelineBarrier(cb, barrierBatch.srcStages,
@@ -312,3 +310,22 @@ void VKBuffer_Destroy(VKDevice* device, VKBuffer* buffer) {
free(buffer);
}
}
/**
 * Prepare a buffer memory barrier and append it to the batch.
 *
 * The barrier covers the whole buffer (offset 0, VK_WHOLE_SIZE) and
 * transitions it from (srcStage, srcAccess) to (dstStage, dstAccess).
 * Queue family ownership is not transferred (VK_QUEUE_FAMILY_IGNORED).
 *
 * barriers - array with room for at least batch->barrierCount + 1 entries
 * batch    - accumulates the barrier count and the combined src/dst stage masks
 * buffer   - buffer whose handle the barrier targets
 *
 * NOTE(review): unlike the removed VKRenderer_AddBufferBarrier, no
 * redundancy check is performed here — a barrier is recorded
 * unconditionally. Callers are expected to track the previous
 * stage/access themselves and only call when a transition is needed.
 */
void VKBuffer_AddBarrier(VkBufferMemoryBarrier* barriers, VKBarrierBatch* batch, VKBuffer* buffer,
                         VkPipelineStageFlags srcStage, VkAccessFlags srcAccess,
                         VkPipelineStageFlags dstStage, VkAccessFlags dstAccess) {
    assert(barriers != NULL && batch != NULL && buffer != NULL);
    barriers[batch->barrierCount] = (VkBufferMemoryBarrier) {
            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
            .srcAccessMask = srcAccess,
            .dstAccessMask = dstAccess,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .buffer = buffer->handle,
            .offset = 0,
            .size = VK_WHOLE_SIZE
    };
    batch->barrierCount++;
    // Stage masks are OR-ed so one vkCmdPipelineBarrier call can cover
    // every barrier accumulated in this batch.
    batch->srcStages |= srcStage;
    batch->dstStages |= dstStage;
}

View File

@@ -35,8 +35,6 @@
struct VKBuffer {
VkBuffer handle;
VkPipelineStageFlagBits lastStage;
VkAccessFlagBits lastAccess;
// Buffer has no ownership over its memory.
// Provided memory, offset and size must only be used to flush memory writes.
// Allocation and freeing is done in pages.
@@ -96,4 +94,8 @@ void VKBuffer_Destroy(VKDevice* device, VKBuffer* buffer);
void VKBuffer_Dispose(VKDevice* device, void* ctx);
void VKBuffer_AddBarrier(VkBufferMemoryBarrier* barriers, VKBarrierBatch* batch, VKBuffer* buffer,
VkPipelineStageFlags srcStage, VkAccessFlags srcAccess,
VkPipelineStageFlags dstStage, VkAccessFlags dstAccess);
#endif // VKBuffer_h_Included

View File

@@ -603,33 +603,6 @@ void VKRenderer_AddImageBarrier(VkImageMemoryBarrier* barriers, VKBarrierBatch*
}
}
/**
 * Prepare buffer barrier info to be executed in batch, if needed.
 *
 * A barrier is only recorded when the requested stage or access differs
 * from the buffer's last recorded state (buffer->lastStage /
 * buffer->lastAccess); the tracking fields are then updated so subsequent
 * calls with the same state are skipped as redundant.
 *
 * barriers - array with room for at least batch->barrierCount + 1 entries
 * batch    - accumulates the barrier count and combined src/dst stage masks
 * buffer   - buffer whose handle and last-known stage/access are used
 */
void VKRenderer_AddBufferBarrier(VkBufferMemoryBarrier* barriers, VKBarrierBatch* batch,
                                 VKBuffer* buffer, VkPipelineStageFlags stage,
                                 VkAccessFlags access)
{
    assert(barriers != NULL && batch != NULL && buffer != NULL);
    // Skip redundant barriers: only transition when stage or access changed.
    if (stage != buffer->lastStage || access != buffer->lastAccess) {
        barriers[batch->barrierCount] = (VkBufferMemoryBarrier) {
                .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
                .srcAccessMask = buffer->lastAccess,
                .dstAccessMask = access,
                .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
                .buffer = buffer->handle,
                .offset = 0,
                // Whole-buffer barrier; sub-range access is not tracked.
                .size = VK_WHOLE_SIZE
        };
        batch->barrierCount++;
        batch->srcStages |= buffer->lastStage;
        batch->dstStages |= stage;
        // Remember the new state so later calls can detect redundancy.
        buffer->lastStage = stage;
        buffer->lastAccess = access;
    }
}
/**
* Get Color RGBA components in a suitable for the current render pass.
*/

View File

@@ -82,10 +82,6 @@ VkCommandBuffer VKRenderer_Record(VKRenderer* renderer);
void VKRenderer_AddImageBarrier(VkImageMemoryBarrier* barriers, VKBarrierBatch* batch,
VKImage* image, VkPipelineStageFlags stage, VkAccessFlags access, VkImageLayout layout);
void VKRenderer_AddBufferBarrier(VkBufferMemoryBarrier* barriers, VKBarrierBatch* batch,
VKBuffer* buffer, VkPipelineStageFlags stage,
VkAccessFlags access);
/**
* Record barrier batches into the primary command buffer.
*/