mirror of https://github.com/libretro/ppsspp.git
Refactor: Split out VulkanQueueRunner from VulkanRenderManager
Should be no functionality change, but good to do before adding more functionality.
This commit is contained in:
parent fa81dd9c44
commit 3f918ed328
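In brief (an illustrative sketch pieced together from the diff below, not text from the commit): VulkanRenderManager now owns a VulkanQueueRunner member (queueRunner_) and delegates render pass creation and per-frame step execution to it, roughly like this:

// Sketch of the new division of labor, based on the hunks further down.
VulkanRenderManager::VulkanRenderManager(VulkanContext *vulkan)
	: vulkan_(vulkan), queueRunner_(vulkan) {
	// ...
	queueRunner_.CreateDeviceObjects();  // replaces InitBackbufferRenderPass() + InitRenderpasses()
}

void VulkanRenderManager::Run(int frame) {
	// ...
	queueRunner_.SetBackbuffer(framebuffers_[curSwapchainImage]);
	queueRunner_.RunSteps(cmd, stepsOnThread);  // replaces the inline switch over the queued VKRSteps
	stepsOnThread.clear();
	// ...
}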
@@ -1 +1,690 @@
#include "VulkanQueueRunner.h"
#include "VulkanRenderManager.h"

void VulkanQueueRunner::CreateDeviceObjects() {
	InitBackbufferRenderPass();
	InitRenderpasses();
}

void VulkanQueueRunner::DestroyDeviceObjects() {
	VkDevice device = vulkan_->GetDevice();
	for (int i = 0; i < ARRAY_SIZE(renderPasses_); i++) {
		assert(renderPasses_[i] != VK_NULL_HANDLE);
		vkDestroyRenderPass(device, renderPasses_[i], nullptr);
	}
	assert(backbufferRenderPass_ != VK_NULL_HANDLE);
	vkDestroyRenderPass(device, backbufferRenderPass_, nullptr);
}

void VulkanQueueRunner::InitBackbufferRenderPass() {
	VkResult U_ASSERT_ONLY res;

	VkAttachmentDescription attachments[2];
	attachments[0].format = vulkan_->GetSwapchainFormat();
	attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
	attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
	attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].flags = 0;

	attachments[1].format = vulkan_->GetDeviceInfo().preferredDepthStencilFormat;  // must use this same format later for the back depth buffer.
	attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
	attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
	attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].flags = 0;

	VkAttachmentReference color_reference = {};
	color_reference.attachment = 0;
	color_reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	VkAttachmentReference depth_reference{};
	depth_reference.attachment = 1;
	depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

	VkSubpassDescription subpass = {};
	subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
	subpass.flags = 0;
	subpass.inputAttachmentCount = 0;
	subpass.pInputAttachments = nullptr;
	subpass.colorAttachmentCount = 1;
	subpass.pColorAttachments = &color_reference;
	subpass.pResolveAttachments = nullptr;
	subpass.pDepthStencilAttachment = &depth_reference;
	subpass.preserveAttachmentCount = 0;
	subpass.pPreserveAttachments = nullptr;

	VkRenderPassCreateInfo rp_info = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
	rp_info.pNext = nullptr;
	rp_info.attachmentCount = 2;
	rp_info.pAttachments = attachments;
	rp_info.subpassCount = 1;
	rp_info.pSubpasses = &subpass;
	rp_info.dependencyCount = 0;
	rp_info.pDependencies = nullptr;

	res = vkCreateRenderPass(vulkan_->GetDevice(), &rp_info, nullptr, &backbufferRenderPass_);
	assert(res == VK_SUCCESS);
}

void VulkanQueueRunner::InitRenderpasses() {
	// Create a bunch of render pass objects, for normal rendering with a depth buffer,
	// with clearing, without clearing, and dont-care for both depth/stencil and color, so 3*3=9 combos.
	VkAttachmentDescription attachments[2] = {};
	attachments[0].format = VK_FORMAT_R8G8B8A8_UNORM;
	attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
	attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].flags = 0;

	attachments[1].format = vulkan_->GetDeviceInfo().preferredDepthStencilFormat;
	attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].flags = 0;

	VkAttachmentReference color_reference = {};
	color_reference.attachment = 0;
	color_reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	VkAttachmentReference depth_reference = {};
	depth_reference.attachment = 1;
	depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

	VkSubpassDescription subpass = {};
	subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
	subpass.flags = 0;
	subpass.inputAttachmentCount = 0;
	subpass.pInputAttachments = nullptr;
	subpass.colorAttachmentCount = 1;
	subpass.pColorAttachments = &color_reference;
	subpass.pResolveAttachments = nullptr;
	subpass.pDepthStencilAttachment = &depth_reference;
	subpass.preserveAttachmentCount = 0;
	subpass.pPreserveAttachments = nullptr;

	VkRenderPassCreateInfo rp = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
	rp.attachmentCount = 2;
	rp.pAttachments = attachments;
	rp.subpassCount = 1;
	rp.pSubpasses = &subpass;
	rp.dependencyCount = 0;
	rp.pDependencies = nullptr;

	for (int depth = 0; depth < 3; depth++) {
		switch ((VKRRenderPassAction)depth) {
		case VKRRenderPassAction::CLEAR:
			attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
			attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
			break;
		case VKRRenderPassAction::KEEP:
			attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
			attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
			break;
		case VKRRenderPassAction::DONT_CARE:
			attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			break;
		}
		for (int color = 0; color < 3; color++) {
			switch ((VKRRenderPassAction)color) {
			case VKRRenderPassAction::CLEAR: attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; break;
			case VKRRenderPassAction::KEEP: attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD; break;
			case VKRRenderPassAction::DONT_CARE: attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; break;
			}
			int index = RPIndex((VKRRenderPassAction)color, (VKRRenderPassAction)depth);
			vkCreateRenderPass(vulkan_->GetDevice(), &rp, nullptr, &renderPasses_[index]);
		}
	}
}
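For reference (an illustration, not part of the commit): with the enum order declared in VulkanQueueRunner.h (DONT_CARE = 0, CLEAR = 1, KEEP = 2) and RPIndex(color, depth) returning (int)depth * 3 + (int)color, the nine render passes created above land at indices 0 through 8. For example:

// Illustration only ("runner" is a hypothetical VulkanQueueRunner instance).
// RPIndex(DONT_CARE, DONT_CARE) == 0
// RPIndex(CLEAR,     DONT_CARE) == 1            (clear color, don't care about depth/stencil)
// RPIndex(CLEAR,     KEEP)      == 2 * 3 + 1 == 7
VkRenderPass rp = runner.GetRenderPass(runner.RPIndex(VKRRenderPassAction::CLEAR, VKRRenderPassAction::KEEP));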

void VulkanQueueRunner::RunSteps(VkCommandBuffer cmd, const std::vector<VKRStep *> &steps) {
	// Optimizes renderpasses, then sequences them.
	for (int i = 0; i < steps.size(); i++) {
		const VKRStep &step = *steps[i];
		switch (step.stepType) {
		case VKRStepType::RENDER:
			PerformRenderPass(step, cmd);
			break;
		case VKRStepType::COPY:
			PerformCopy(step, cmd);
			break;
		case VKRStepType::BLIT:
			PerformBlit(step, cmd);
			break;
		case VKRStepType::READBACK:
			// PerformReadback
			break;
		}
		delete steps[i];
	}
}
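As context (a sketch, not part of the diff): each VKRStep consumed above is built by VulkanRenderManager and handed over as a heap-allocated object, which is why RunSteps deletes it after execution. Based on the VKRStep declaration further down in VulkanQueueRunner.h, queuing a framebuffer copy looks roughly like this:

// Sketch only - field names follow the VKRStep declaration in VulkanQueueRunner.h.
VKRStep *step = new VKRStep(VKRStepType::COPY);
step->copy.src = srcFb;                      // srcFb/dstFb: VKRFramebuffer pointers (assumed to exist)
step->copy.dst = dstFb;
step->copy.srcRect = { { 0, 0 }, { 128, 128 } };
step->copy.dstPos = { 0, 0 };
step->copy.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
steps.push_back(step);                       // later consumed (and deleted) by RunSteps()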

void VulkanQueueRunner::PerformRenderPass(const VKRStep &step, VkCommandBuffer cmd) {
	// TODO: If there are multiple, we can transition them together.
	for (const auto &iter : step.preTransitions) {
		if (iter.fb->color.layout != iter.targetLayout) {
			VkImageMemoryBarrier barrier{};
			barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
			barrier.oldLayout = iter.fb->color.layout;
			barrier.subresourceRange.layerCount = 1;
			barrier.subresourceRange.levelCount = 1;
			barrier.image = iter.fb->color.image;
			barrier.srcAccessMask = 0;
			VkPipelineStageFlags srcStage;
			VkPipelineStageFlags dstStage;
			switch (barrier.oldLayout) {
			case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
				barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
				srcStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
				break;
			case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
				barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
				srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
				break;
			case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
				barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
				srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
				break;
			default:
				Crash();
				break;
			}
			barrier.newLayout = iter.targetLayout;
			switch (barrier.newLayout) {
			case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
				barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
				dstStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
				break;
			default:
				Crash();
				break;
			}
			barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
			barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
			barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;

			vkCmdPipelineBarrier(cmd, srcStage, dstStage, 0, 0, nullptr, 0, nullptr, 1, &barrier);
			iter.fb->color.layout = barrier.newLayout;
		}
	}

	// This is supposed to bind a vulkan render pass to the command buffer.
	PerformBindFramebufferAsRenderTarget(step, cmd);

	int curWidth = step.render.framebuffer ? step.render.framebuffer->width : vulkan_->GetBackbufferWidth();
	int curHeight = step.render.framebuffer ? step.render.framebuffer->height : vulkan_->GetBackbufferHeight();

	VKRFramebuffer *fb = step.render.framebuffer;

	VkPipeline lastPipeline = VK_NULL_HANDLE;

	auto &commands = step.commands;

	// TODO: Dynamic state commands (SetViewport, SetScissor, SetBlendConstants, SetStencil*) are only
	// valid when a pipeline is bound with those as dynamic state. So we need to add some state tracking here
	// for this to be correct. This is a bit of a pain but also will let us eliminate redundant calls.

	for (const auto &c : commands) {
		switch (c.cmd) {
		case VKRRenderCommand::BIND_PIPELINE:
			if (c.pipeline.pipeline != lastPipeline) {
				vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, c.pipeline.pipeline);
				lastPipeline = c.pipeline.pipeline;
			}
			break;

		case VKRRenderCommand::VIEWPORT:
			vkCmdSetViewport(cmd, 0, 1, &c.viewport.vp);
			break;

		case VKRRenderCommand::SCISSOR:
			vkCmdSetScissor(cmd, 0, 1, &c.scissor.scissor);
			break;

		case VKRRenderCommand::BLEND:
			vkCmdSetBlendConstants(cmd, c.blendColor.color);
			break;

		case VKRRenderCommand::STENCIL:
			vkCmdSetStencilWriteMask(cmd, VK_STENCIL_FRONT_AND_BACK, c.stencil.stencilWriteMask);
			vkCmdSetStencilCompareMask(cmd, VK_STENCIL_FRONT_AND_BACK, c.stencil.stencilCompareMask);
			vkCmdSetStencilReference(cmd, VK_STENCIL_FRONT_AND_BACK, c.stencil.stencilRef);
			break;

		case VKRRenderCommand::DRAW_INDEXED:
			vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, c.drawIndexed.pipelineLayout, 0, 1, &c.drawIndexed.ds, c.drawIndexed.numUboOffsets, c.drawIndexed.uboOffsets);
			vkCmdBindIndexBuffer(cmd, c.drawIndexed.ibuffer, c.drawIndexed.ioffset, VK_INDEX_TYPE_UINT16);
			vkCmdBindVertexBuffers(cmd, 0, 1, &c.drawIndexed.vbuffer, &c.drawIndexed.voffset);
			vkCmdDrawIndexed(cmd, c.drawIndexed.count, c.drawIndexed.instances, 0, 0, 0);
			break;

		case VKRRenderCommand::DRAW:
			vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, c.draw.pipelineLayout, 0, 1, &c.draw.ds, c.draw.numUboOffsets, c.draw.uboOffsets);
			vkCmdBindVertexBuffers(cmd, 0, 1, &c.draw.vbuffer, &c.draw.voffset);
			vkCmdDraw(cmd, c.draw.count, 1, 0, 0);
			break;

		case VKRRenderCommand::CLEAR:
		{
			int numAttachments = 0;
			VkClearRect rc{};
			rc.baseArrayLayer = 0;
			rc.layerCount = 1;
			rc.rect.extent.width = curWidth;
			rc.rect.extent.height = curHeight;
			VkClearAttachment attachments[2];
			if (c.clear.clearMask & VK_IMAGE_ASPECT_COLOR_BIT) {
				VkClearAttachment &attachment = attachments[numAttachments++];
				attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
				attachment.colorAttachment = 0;
				Uint8x4ToFloat4(attachment.clearValue.color.float32, c.clear.clearColor);
			}
			if (c.clear.clearMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
				VkClearAttachment &attachment = attachments[numAttachments++];
				attachment.aspectMask = 0;
				if (c.clear.clearMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
					attachment.clearValue.depthStencil.depth = c.clear.clearZ;
					attachment.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
				}
				if (c.clear.clearMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
					attachment.clearValue.depthStencil.stencil = c.clear.clearStencil;
					attachment.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
				}
			}
			if (numAttachments) {
				vkCmdClearAttachments(cmd, numAttachments, attachments, 1, &rc);
			}
			break;
		}
		default:
			ELOG("Unimpl queue command");
			break;
		}
	}
	vkCmdEndRenderPass(cmd);

	// Transition the framebuffer if requested.
	if (fb && step.render.finalColorLayout != VK_IMAGE_LAYOUT_UNDEFINED) {
		VkImageMemoryBarrier barrier{};
		barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
		barrier.oldLayout = fb->color.layout;
		barrier.subresourceRange.layerCount = 1;
		barrier.subresourceRange.levelCount = 1;
		barrier.image = fb->color.image;
		barrier.srcAccessMask = 0;
		switch (barrier.oldLayout) {
		case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
			barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
			break;
		case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
			barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
			break;
		default:
			Crash();
		}
		barrier.newLayout = step.render.finalColorLayout;
		switch (barrier.newLayout) {
		case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
			barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
			break;
		default:
			Crash();
		}
		barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

		// we're between passes so it's OK.
		// ARM Best Practices guide recommends these stage bits.
		vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &barrier);
		fb->color.layout = barrier.newLayout;
	}
}

void VulkanQueueRunner::PerformBindFramebufferAsRenderTarget(const VKRStep &step, VkCommandBuffer cmd) {
	VkFramebuffer framebuf;
	int w;
	int h;
	VkImageLayout prevLayout;
	if (step.render.framebuffer) {
		VKRFramebuffer *fb = step.render.framebuffer;
		framebuf = fb->framebuf;
		w = fb->width;
		h = fb->height;
		prevLayout = fb->color.layout;
	} else {
		framebuf = backbuffer_;
		w = vulkan_->GetBackbufferWidth();
		h = vulkan_->GetBackbufferHeight();
	}

	VkRenderPass renderPass;
	int numClearVals = 0;
	VkClearValue clearVal[2];
	memset(clearVal, 0, sizeof(clearVal));
	if (step.render.framebuffer) {
		VKRFramebuffer *fb = step.render.framebuffer;
		// Now, if the image needs transitioning, let's transition.
		// The backbuffer does not, that's handled by VulkanContext.
		if (step.render.framebuffer->color.layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
			VkAccessFlags srcAccessMask;
			VkPipelineStageFlags srcStage;
			switch (fb->color.layout) {
			case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
				srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
				srcStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
				break;
			case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
				srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
				srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
				break;
			case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
				srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
				srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
				break;
			default:
				Crash();
				break;
			}

			TransitionImageLayout2(cmd, fb->color.image, VK_IMAGE_ASPECT_COLOR_BIT,
				fb->color.layout, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
				srcStage, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
				srcAccessMask, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT);
			fb->color.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		}
		if (fb->depth.layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
			VkAccessFlags srcAccessMask;
			VkPipelineStageFlags srcStage;
			switch (fb->depth.layout) {
			case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
				srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
				srcStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
				break;
			case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
				srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
				srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
				break;
			case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
				srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
				srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
				break;
			default:
				Crash();
				break;
			}
			TransitionImageLayout2(cmd, fb->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
				fb->depth.layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
				srcStage, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
				srcAccessMask, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT);
			fb->depth.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
		}

		renderPass = renderPasses_[RPIndex(step.render.color, step.render.depthStencil)];
		// VLOG("Switching framebuffer to FBO (fc=%d, cmd=%x, rp=%x)", frameNum_, (int)(uintptr_t)cmd_, (int)(uintptr_t)renderPass);
		if (step.render.color == VKRRenderPassAction::CLEAR) {
			Uint8x4ToFloat4(clearVal[0].color.float32, step.render.clearColor);
			numClearVals = 1;
		}
		if (step.render.depthStencil == VKRRenderPassAction::CLEAR) {
			clearVal[1].depthStencil.depth = step.render.clearDepth;
			clearVal[1].depthStencil.stencil = step.render.clearStencil;
			numClearVals = 2;
		}
	} else {
		renderPass = GetBackbufferRenderPass();
		numClearVals = 2;  // We don't bother with a depth buffer here.
		clearVal[1].depthStencil.depth = 0.0f;
		clearVal[1].depthStencil.stencil = 0;
	}

	VkRenderPassBeginInfo rp_begin = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
	rp_begin.renderPass = renderPass;
	rp_begin.framebuffer = framebuf;
	rp_begin.renderArea.offset.x = 0;
	rp_begin.renderArea.offset.y = 0;
	rp_begin.renderArea.extent.width = w;
	rp_begin.renderArea.extent.height = h;
	rp_begin.clearValueCount = numClearVals;
	rp_begin.pClearValues = numClearVals ? clearVal : nullptr;
	vkCmdBeginRenderPass(cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
}
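Side note (not part of this commit): TransitionImageLayout2 is a helper defined elsewhere in the codebase. Judging purely from the call sites above, it presumably records a single VkImageMemoryBarrier with the given layouts, stages and access masks, roughly like this sketch (signature and body assumed):

// Rough sketch only - the real helper lives outside this diff.
void TransitionImageLayout2(VkCommandBuffer cmd, VkImage image, VkImageAspectFlags aspect,
		VkImageLayout oldLayout, VkImageLayout newLayout,
		VkPipelineStageFlags srcStage, VkPipelineStageFlags dstStage,
		VkAccessFlags srcAccess, VkAccessFlags dstAccess) {
	VkImageMemoryBarrier barrier{ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
	barrier.oldLayout = oldLayout;
	barrier.newLayout = newLayout;
	barrier.srcAccessMask = srcAccess;
	barrier.dstAccessMask = dstAccess;
	barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	barrier.image = image;
	barrier.subresourceRange.aspectMask = aspect;
	barrier.subresourceRange.levelCount = 1;
	barrier.subresourceRange.layerCount = 1;
	vkCmdPipelineBarrier(cmd, srcStage, dstStage, 0, 0, nullptr, 0, nullptr, 1, &barrier);
}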

void VulkanQueueRunner::PerformCopy(const VKRStep &step, VkCommandBuffer cmd) {
	VKRFramebuffer *src = step.copy.src;
	VKRFramebuffer *dst = step.copy.dst;

	VkImageCopy copy{};
	copy.srcOffset.x = step.copy.srcRect.offset.x;
	copy.srcOffset.y = step.copy.srcRect.offset.y;
	copy.srcOffset.z = 0;
	copy.srcSubresource.mipLevel = 0;
	copy.srcSubresource.layerCount = 1;
	copy.dstOffset.x = step.copy.dstPos.x;
	copy.dstOffset.y = step.copy.dstPos.y;
	copy.dstOffset.z = 0;
	copy.dstSubresource.mipLevel = 0;
	copy.dstSubresource.layerCount = 1;
	copy.extent.width = step.copy.srcRect.extent.width;
	copy.extent.height = step.copy.srcRect.extent.height;
	copy.extent.depth = 1;

	VkImageMemoryBarrier srcBarriers[2]{};
	VkImageMemoryBarrier dstBarriers[2]{};
	int srcCount = 0;
	int dstCount = 0;

	VkPipelineStageFlags srcStage = 0;
	VkPipelineStageFlags dstStage = 0;
	// First source barriers.
	if (step.copy.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
		if (src->color.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
			SetupTransitionToTransferSrc(src->color, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_COLOR_BIT);
		}
		if (dst->color.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
			SetupTransitionToTransferDst(dst->color, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_COLOR_BIT);
		}
	}

	// We can't copy only depth or only stencil unfortunately.
	if (step.copy.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
		if (src->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
			SetupTransitionToTransferSrc(src->depth, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
		}
		if (dst->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
			SetupTransitionToTransferDst(dst->depth, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
		}
	}

	if (srcCount) {
		vkCmdPipelineBarrier(cmd, srcStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, srcCount, srcBarriers);
	}
	if (dstCount) {
		vkCmdPipelineBarrier(cmd, dstStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, dstCount, dstBarriers);
	}

	if (step.copy.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
		copy.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		copy.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		vkCmdCopyImage(cmd, src->color.image, src->color.layout, dst->color.image, dst->color.layout, 1, &copy);
	}
	if (step.copy.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
		copy.srcSubresource.aspectMask = 0;
		copy.dstSubresource.aspectMask = 0;
		if (step.copy.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
			copy.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
			copy.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
		}
		if (step.copy.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
			copy.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
			copy.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
		}
		vkCmdCopyImage(cmd, src->depth.image, src->depth.layout, dst->depth.image, dst->depth.layout, 1, &copy);
	}
}

void VulkanQueueRunner::PerformBlit(const VKRStep &step, VkCommandBuffer cmd) {
	VkImageMemoryBarrier srcBarriers[2]{};
	VkImageMemoryBarrier dstBarriers[2]{};

	VKRFramebuffer *src = step.blit.src;
	VKRFramebuffer *dst = step.blit.dst;

	// If any validation needs to be performed here, it should probably have been done
	// already when the blit was queued. So don't validate here.
	VkImageBlit blit{};
	blit.srcOffsets[0].x = step.blit.srcRect.offset.x;
	blit.srcOffsets[0].y = step.blit.srcRect.offset.y;
	blit.srcOffsets[0].z = 0;
	blit.srcOffsets[1].x = step.blit.srcRect.offset.x + step.blit.srcRect.extent.width;
	blit.srcOffsets[1].y = step.blit.srcRect.offset.y + step.blit.srcRect.extent.height;
	blit.srcOffsets[1].z = 1;
	blit.srcSubresource.mipLevel = 0;
	blit.srcSubresource.layerCount = 1;
	blit.dstOffsets[0].x = step.blit.dstRect.offset.x;
	blit.dstOffsets[0].y = step.blit.dstRect.offset.y;
	blit.dstOffsets[0].z = 0;
	blit.dstOffsets[1].x = step.blit.dstRect.offset.x + step.blit.dstRect.extent.width;
	blit.dstOffsets[1].y = step.blit.dstRect.offset.y + step.blit.dstRect.extent.height;
	blit.dstOffsets[1].z = 1;
	blit.dstSubresource.mipLevel = 0;
	blit.dstSubresource.layerCount = 1;

	VkPipelineStageFlags srcStage = 0;
	VkPipelineStageFlags dstStage = 0;

	int srcCount = 0;
	int dstCount = 0;

	// First source barriers.
	if (step.blit.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
		if (src->color.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
			SetupTransitionToTransferSrc(src->color, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_COLOR_BIT);
		}
		if (dst->color.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
			SetupTransitionToTransferDst(dst->color, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_COLOR_BIT);
		}
	}

	// We can't copy only depth or only stencil unfortunately.
	if (step.blit.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
		if (src->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
			SetupTransitionToTransferSrc(src->depth, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
		}
		if (dst->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
			SetupTransitionToTransferDst(dst->depth, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
		}
	}

	if (srcCount) {
		vkCmdPipelineBarrier(cmd, srcStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, srcCount, srcBarriers);
	}
	if (dstCount) {
		vkCmdPipelineBarrier(cmd, dstStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, dstCount, dstBarriers);
	}

	if (step.blit.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
		blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		vkCmdBlitImage(cmd, src->color.image, src->color.layout, dst->color.image, dst->color.layout, 1, &blit, step.blit.filter);
	}

	// TODO: Need to check if the depth format is blittable.
	// Actually, we should probably almost always use copies rather than blits for depth buffers.
	if (step.blit.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
		blit.srcSubresource.aspectMask = 0;
		blit.dstSubresource.aspectMask = 0;
		if (step.blit.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
			blit.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
			blit.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
		}
		if (step.blit.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
			blit.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
			blit.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
		}
		vkCmdBlitImage(cmd, src->depth.image, src->depth.layout, dst->depth.image, dst->depth.layout, 1, &blit, step.blit.filter);
	}
}

void VulkanQueueRunner::SetupTransitionToTransferSrc(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect) {
	barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
	barrier.oldLayout = img.layout;
	barrier.subresourceRange.layerCount = 1;
	barrier.subresourceRange.levelCount = 1;
	barrier.image = img.image;
	barrier.srcAccessMask = 0;
	switch (img.layout) {
	case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
		stage |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		break;
	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
		stage |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		break;
	case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
		stage |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		break;
	case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
		stage |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		break;
	default:
		Crash();
	}
	barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
	barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
	barrier.subresourceRange.aspectMask = aspect;
	barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	img.layout = barrier.newLayout;
}

void VulkanQueueRunner::SetupTransitionToTransferDst(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect) {
	barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
	barrier.oldLayout = img.layout;
	barrier.subresourceRange.layerCount = 1;
	barrier.subresourceRange.levelCount = 1;
	barrier.image = img.image;
	barrier.srcAccessMask = 0;
	switch (img.layout) {
	case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
		stage |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
		break;
	case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
		stage |= VK_PIPELINE_STAGE_TRANSFER_BIT;
		break;
	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
		stage |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
		break;
	case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
		barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
		stage |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		break;
	default:
		Crash();
	}
	barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
	barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
	barrier.subresourceRange.aspectMask = aspect;
	barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	img.layout = barrier.newLayout;
}
@@ -1,6 +1,182 @@
#pragma once

#include <cstdint>

#include "Common/Vulkan/VulkanContext.h"
#include "math/dataconv.h"
#include "thin3d/thin3d.h"

class VKRFramebuffer;
struct VKRImage;

// The cool thing is that you can Flush on a different thread than you record the commands on!

enum class VKRRenderCommand : uint8_t {
	BIND_PIPELINE,
	STENCIL,
	BLEND,
	VIEWPORT,
	SCISSOR,
	CLEAR,
	DRAW,
	DRAW_INDEXED,
};

struct VkRenderData {
	VKRRenderCommand cmd;
	union {
		struct {
			VkPipeline pipeline;
		} pipeline;
		struct {
			VkPipelineLayout pipelineLayout;
			VkDescriptorSet ds;
			int numUboOffsets;
			uint32_t uboOffsets[3];
			VkBuffer vbuffer;
			VkDeviceSize voffset;
			uint32_t count;
		} draw;
		struct {
			VkPipelineLayout pipelineLayout;
			VkDescriptorSet ds;
			int numUboOffsets;
			uint32_t uboOffsets[3];
			VkBuffer vbuffer;  // might need to increase at some point
			VkDeviceSize voffset;
			VkBuffer ibuffer;
			VkDeviceSize ioffset;
			uint32_t count;
			int16_t instances;
			VkIndexType indexType;
		} drawIndexed;
		struct {
			uint32_t clearColor;
			float clearZ;
			int clearStencil;
			int clearMask;  // VK_IMAGE_ASPECT_COLOR_BIT etc
		} clear;
		struct {
			VkViewport vp;
		} viewport;
		struct {
			VkRect2D scissor;
		} scissor;
		struct {
			uint8_t stencilWriteMask;
			uint8_t stencilCompareMask;
			uint8_t stencilRef;
		} stencil;
		struct {
			float color[4];
		} blendColor;
		struct {
		} beginRp;
		struct {
		} endRp;
	};
};
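As context (a sketch, not in the original header): VkRenderData is a small tagged union keyed by cmd. The render manager is expected to push one entry per recorded command into the current step's commands vector, and PerformRenderPass in VulkanQueueRunner.cpp switches on cmd to replay them. Roughly:

// Sketch only - recording a draw into the current render step (curRenderStep_ and the
// pipelineLayout/descSet/buffer handles are assumed to exist on the render-manager side).
VkRenderData data{ VKRRenderCommand::DRAW };
data.draw.pipelineLayout = pipelineLayout;
data.draw.ds = descSet;
data.draw.numUboOffsets = 1;
data.draw.uboOffsets[0] = uboOffset;
data.draw.vbuffer = vbuf;
data.draw.voffset = vbufOffset;
data.draw.count = vertexCount;
curRenderStep_->commands.push_back(data);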

enum class VKRStepType : uint8_t {
	RENDER,
	COPY,
	BLIT,
	READBACK,
};

enum class VKRRenderPassAction {
	DONT_CARE,
	CLEAR,
	KEEP,
};

struct TransitionRequest {
	VKRFramebuffer *fb;
	VkImageLayout targetLayout;
};

struct VKRStep {
	VKRStep(VKRStepType _type) : stepType(_type) {}
	VKRStepType stepType;
	std::vector<VkRenderData> commands;
	std::vector<TransitionRequest> preTransitions;
	union {
		struct {
			VKRFramebuffer *framebuffer;
			VKRRenderPassAction color;
			VKRRenderPassAction depthStencil;
			uint32_t clearColor;
			float clearDepth;
			int clearStencil;
			int numDraws;
			VkImageLayout finalColorLayout;
		} render;
		struct {
			VKRFramebuffer *src;
			VKRFramebuffer *dst;
			VkRect2D srcRect;
			VkOffset2D dstPos;
			int aspectMask;
		} copy;
		struct {
			VKRFramebuffer *src;
			VKRFramebuffer *dst;
			VkRect2D srcRect;
			VkRect2D dstRect;
			int aspectMask;
			VkFilter filter;
		} blit;
		struct {
			VKRFramebuffer *src;
			void *destPtr;
			VkRect2D srcRect;
		} readback;
	};
};

class VulkanQueueRunner {
public:
	VulkanQueueRunner(VulkanContext *vulkan) : vulkan_(vulkan) {}
	void SetBackbuffer(VkFramebuffer fb) {
		backbuffer_ = fb;
	}
	void RunSteps(VkCommandBuffer cmd, const std::vector<VKRStep *> &steps);

	void CreateDeviceObjects();
	void DestroyDeviceObjects();

	VkRenderPass GetBackbufferRenderPass() const {
		return backbufferRenderPass_;
	}
	VkRenderPass GetRenderPass(int i) const {
		return renderPasses_[i];
	}

	inline int RPIndex(VKRRenderPassAction color, VKRRenderPassAction depth) {
		return (int)depth * 3 + (int)color;
	}

private:
	void InitBackbufferRenderPass();
	void InitRenderpasses();

	void PerformBindFramebufferAsRenderTarget(const VKRStep &pass, VkCommandBuffer cmd);
	void PerformRenderPass(const VKRStep &pass, VkCommandBuffer cmd);
	void PerformCopy(const VKRStep &pass, VkCommandBuffer cmd);
	void PerformBlit(const VKRStep &pass, VkCommandBuffer cmd);

	static void SetupTransitionToTransferSrc(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect);
	static void SetupTransitionToTransferDst(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect);

	VulkanContext *vulkan_;

	VkFramebuffer backbuffer_;
	VkFramebuffer curFramebuffer_ = VK_NULL_HANDLE;

	VkRenderPass backbufferRenderPass_ = VK_NULL_HANDLE;
	// Renderpasses, all combinations of preserving or clearing or dont-care-ing fb contents.
	// TODO: Create these on demand.
	VkRenderPass renderPasses_[9]{};
};
@@ -94,7 +94,7 @@ void CreateImage(VulkanContext *vulkan, VkCommandBuffer cmd, VKRImage &img, int
	img.layout = initialLayout;
}

VulkanRenderManager::VulkanRenderManager(VulkanContext *vulkan) : vulkan_(vulkan) {
VulkanRenderManager::VulkanRenderManager(VulkanContext *vulkan) : vulkan_(vulkan), queueRunner_(vulkan) {
	VkSemaphoreCreateInfo semaphoreCreateInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
	semaphoreCreateInfo.flags = 0;
	VkResult res = vkCreateSemaphore(vulkan_->GetDevice(), &semaphoreCreateInfo, nullptr, &acquireSemaphore_);
@@ -124,8 +124,7 @@ VulkanRenderManager::VulkanRenderManager(VulkanContext *vulkan) : vulkan_(vulkan
		frameData_[i].fence = vulkan_->CreateFence(true);  // So it can be instantly waited on
	}

	InitBackbufferRenderPass();
	InitRenderpasses();
	queueRunner_.CreateDeviceObjects();
}

void VulkanRenderManager::CreateBackbuffers() {
@@ -229,12 +228,7 @@ VulkanRenderManager::~VulkanRenderManager() {
		vkDestroyCommandPool(device, frameData_[i].cmdPoolMain, nullptr);
		vkDestroyFence(device, frameData_[i].fence, nullptr);
	}
	for (int i = 0; i < ARRAY_SIZE(renderPasses_); i++) {
		assert(renderPasses_[i] != VK_NULL_HANDLE);
		vkDestroyRenderPass(device, renderPasses_[i], nullptr);
	}
	assert(backbufferRenderPass_ != VK_NULL_HANDLE);
	vkDestroyRenderPass(device, backbufferRenderPass_, nullptr);
	queueRunner_.DestroyDeviceObjects();
}

// TODO: Activate this code.
@@ -340,7 +334,7 @@ void VulkanRenderManager::InitBackbufferFramebuffers(int width, int height) {

	VLOG("InitFramebuffers: %dx%d", width, height);
	VkFramebufferCreateInfo fb_info = { VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO };
	fb_info.renderPass = backbufferRenderPass_;
	fb_info.renderPass = queueRunner_.GetBackbufferRenderPass();
	fb_info.attachmentCount = 2;
	fb_info.pAttachments = attachments;
	fb_info.width = width;
@@ -356,63 +350,6 @@ void VulkanRenderManager::InitBackbufferFramebuffers(int width, int height) {
	}
}

void VulkanRenderManager::InitBackbufferRenderPass() {
	VkResult U_ASSERT_ONLY res;

	VkAttachmentDescription attachments[2];
	attachments[0].format = vulkan_->GetSwapchainFormat();
	attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
	attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
	attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].flags = 0;

	attachments[1].format = vulkan_->GetDeviceInfo().preferredDepthStencilFormat;  // must use this same format later for the back depth buffer.
	attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
	attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
	attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].flags = 0;

	VkAttachmentReference color_reference = {};
	color_reference.attachment = 0;
	color_reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	VkAttachmentReference depth_reference{};
	depth_reference.attachment = 1;
	depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

	VkSubpassDescription subpass = {};
	subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
	subpass.flags = 0;
	subpass.inputAttachmentCount = 0;
	subpass.pInputAttachments = nullptr;
	subpass.colorAttachmentCount = 1;
	subpass.pColorAttachments = &color_reference;
	subpass.pResolveAttachments = nullptr;
	subpass.pDepthStencilAttachment = &depth_reference;
	subpass.preserveAttachmentCount = 0;
	subpass.pPreserveAttachments = nullptr;

	VkRenderPassCreateInfo rp_info = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
	rp_info.pNext = nullptr;
	rp_info.attachmentCount = 2;
	rp_info.pAttachments = attachments;
	rp_info.subpassCount = 1;
	rp_info.pSubpasses = &subpass;
	rp_info.dependencyCount = 0;
	rp_info.pDependencies = nullptr;

	res = vkCreateRenderPass(vulkan_->GetDevice(), &rp_info, nullptr, &backbufferRenderPass_);
	assert(res == VK_SUCCESS);
}

void VulkanRenderManager::InitDepthStencilBuffer(VkCommandBuffer cmd) {
	VkResult U_ASSERT_ONLY res;
	bool U_ASSERT_ONLY pass;
@@ -486,85 +423,6 @@ void VulkanRenderManager::InitDepthStencilBuffer(VkCommandBuffer cmd) {
	assert(res == VK_SUCCESS);
}

void VulkanRenderManager::InitRenderpasses() {
	// Create a bunch of render pass objects, for normal rendering with a depth buffer,
	// with clearing, without clearing, and dont-care for both depth/stencil and color, so 3*3=9 combos.
	VkAttachmentDescription attachments[2] = {};
	attachments[0].format = VK_FORMAT_R8G8B8A8_UNORM;
	attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
	attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[0].flags = 0;

	attachments[1].format = vulkan_->GetDeviceInfo().preferredDepthStencilFormat;
	attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
	attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
	attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
	attachments[1].flags = 0;

	VkAttachmentReference color_reference = {};
	color_reference.attachment = 0;
	color_reference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	VkAttachmentReference depth_reference = {};
	depth_reference.attachment = 1;
	depth_reference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

	VkSubpassDescription subpass = {};
	subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
	subpass.flags = 0;
	subpass.inputAttachmentCount = 0;
	subpass.pInputAttachments = nullptr;
	subpass.colorAttachmentCount = 1;
	subpass.pColorAttachments = &color_reference;
	subpass.pResolveAttachments = nullptr;
	subpass.pDepthStencilAttachment = &depth_reference;
	subpass.preserveAttachmentCount = 0;
	subpass.pPreserveAttachments = nullptr;

	VkRenderPassCreateInfo rp = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
	rp.attachmentCount = 2;
	rp.pAttachments = attachments;
	rp.subpassCount = 1;
	rp.pSubpasses = &subpass;
	rp.dependencyCount = 0;
	rp.pDependencies = nullptr;

	for (int depth = 0; depth < 3; depth++) {
		switch ((VKRRenderPassAction)depth) {
		case VKRRenderPassAction::CLEAR:
			attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
			attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
			break;
		case VKRRenderPassAction::KEEP:
			attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
			attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
			break;
		case VKRRenderPassAction::DONT_CARE:
			attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			break;
		}
		for (int color = 0; color < 3; color++) {
			switch ((VKRRenderPassAction)color) {
			case VKRRenderPassAction::CLEAR: attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; break;
			case VKRRenderPassAction::KEEP: attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD; break;
			case VKRRenderPassAction::DONT_CARE: attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; break;
			}
			int index = RPIndex((VKRRenderPassAction)color, (VKRRenderPassAction)depth);
			vkCreateRenderPass(vulkan_->GetDevice(), &rp, nullptr, &renderPasses_[index]);
		}
	}
}

void VulkanRenderManager::Clear(uint32_t clearColor, float clearZ, int clearStencil, int clearMask) {
	_dbg_assert_(G3D, curRenderStep_ && curRenderStep_->stepType == VKRStepType::RENDER);
	// If this is the first drawing command, merge it into the pass.
@@ -605,7 +463,6 @@ void VulkanRenderManager::CopyFramebuffer(VKRFramebuffer *src, VkRect2D srcRect,
	std::unique_lock<std::mutex> lock(mutex_);
	steps_.push_back(step);
	curRenderStep_ = nullptr;
	curFramebuffer_ = VK_NULL_HANDLE;
}

void VulkanRenderManager::BlitFramebuffer(VKRFramebuffer *src, VkRect2D srcRect, VKRFramebuffer *dst, VkRect2D dstRect, int aspectMask, VkFilter filter) {
@@ -631,7 +488,6 @@ void VulkanRenderManager::BlitFramebuffer(VKRFramebuffer *src, VkRect2D srcRect,
	std::unique_lock<std::mutex> lock(mutex_);
	steps_.push_back(step);
	curRenderStep_ = nullptr;
	curFramebuffer_ = VK_NULL_HANDLE;
}

VkImageView VulkanRenderManager::BindFramebufferAsTexture(VKRFramebuffer *fb, int binding, int aspectBit, int attachment) {
@@ -655,7 +511,6 @@ VkImageView VulkanRenderManager::BindFramebufferAsTexture(VKRFramebuffer *fb, in

void VulkanRenderManager::Flush() {
	curRenderStep_ = nullptr;
	curFramebuffer_ = VK_NULL_HANDLE;
	int curFrame = vulkan_->GetCurFrame();
	FrameData &frameData = frameData_[curFrame];
	if (frameData.hasInitCommands) {
@@ -676,7 +531,7 @@ void VulkanRenderManager::Flush() {

void VulkanRenderManager::Run(int frame) {
	FrameData &frameData = frameData_[frame];
	auto &stepsOnThread_ = frameData_[frame].steps;
	auto &stepsOnThread = frameData_[frame].steps;
	VkDevice device = vulkan_->GetDevice();

	uint32_t curSwapchainImage = 0;
@@ -687,10 +542,10 @@ void VulkanRenderManager::Run(int frame) {

	VkCommandBuffer cmd = frameData.mainCmd;

	VkCommandBufferBeginInfo begin = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
	VkCommandBufferBeginInfo begin{ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
	begin.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
	begin.pInheritanceInfo = nullptr;
	res = vkBeginCommandBuffer(cmd, &begin);

	assert(res == VK_SUCCESS);

	// TODO: Deal with the VK_SUBOPTIMAL_KHR and VK_ERROR_OUT_OF_DATE_KHR
@@ -699,26 +554,10 @@ void VulkanRenderManager::Run(int frame) {
	assert(res == VK_SUCCESS);
	TransitionFromPresent(cmd, swapchainImages_[curSwapchainImage].image);

	// Optimizes renderpasses, then sequences them.
	for (int i = 0; i < stepsOnThread_.size(); i++) {
		const VKRStep &step = *stepsOnThread_[i];
		switch (step.stepType) {
		case VKRStepType::RENDER:
			PerformRenderPass(step, cmd, curSwapchainImage);
			break;
		case VKRStepType::COPY:
			PerformCopy(step, cmd);
			break;
		case VKRStepType::BLIT:
			PerformBlit(step, cmd);
			break;
		case VKRStepType::READBACK:
			// PerformReadback
			break;
		}
		delete stepsOnThread_[i];
	}
	stepsOnThread_.clear();
	queueRunner_.SetBackbuffer(framebuffers_[curSwapchainImage]);
	queueRunner_.RunSteps(cmd, stepsOnThread);
	stepsOnThread.clear();

	insideFrame_ = false;

	TransitionToPresent(frameData.mainCmd, swapchainImages_[curSwapchainImage].image);
@@ -742,6 +581,7 @@ void VulkanRenderManager::Run(int frame) {
	VkSubmitInfo submit_info = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
	submit_info.waitSemaphoreCount = 1;
	submit_info.pWaitSemaphores = &acquireSemaphore_;

	VkPipelineStageFlags waitStage[1] = { VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT };
	submit_info.pWaitDstStageMask = waitStage;
	submit_info.commandBufferCount = (uint32_t)cmdBufs.size();
@@ -777,552 +617,3 @@ void VulkanRenderManager::Run(int frame) {

	VLOG("PULL: Finished running frame %d", frame);
}
|
||||
|
||||
void VulkanRenderManager::PerformRenderPass(const VKRStep &step, VkCommandBuffer cmd, int swapChainImage) {
|
||||
// TODO: If there are multiple, we can transition them together.
|
||||
for (const auto &iter : step.preTransitions) {
|
||||
if (iter.fb->color.layout != iter.targetLayout) {
|
||||
VkImageMemoryBarrier barrier{};
|
||||
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
|
||||
barrier.oldLayout = iter.fb->color.layout;
|
||||
barrier.subresourceRange.layerCount = 1;
|
||||
barrier.subresourceRange.levelCount = 1;
|
||||
barrier.image = iter.fb->color.image;
|
||||
barrier.srcAccessMask = 0;
|
||||
VkPipelineStageFlags srcStage;
|
||||
VkPipelineStageFlags dstStage;
|
||||
switch (barrier.oldLayout) {
|
||||
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
|
||||
barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
|
||||
srcStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
|
||||
break;
|
||||
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
|
||||
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
|
||||
srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
|
||||
break;
|
||||
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
|
||||
barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
|
||||
srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
|
||||
break;
|
||||
default:
|
||||
Crash();
|
||||
break;
|
||||
}
|
||||
barrier.newLayout = iter.targetLayout;
|
||||
switch (barrier.newLayout) {
|
||||
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
|
||||
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
|
||||
dstStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
|
||||
break;
|
||||
default:
|
||||
Crash();
|
||||
break;
|
||||
}
|
||||
barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
|
||||
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
||||
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
||||
|
||||
vkCmdPipelineBarrier(cmd, srcStage, dstStage, 0, 0, nullptr, 0, nullptr, 1, &barrier);
|
||||
iter.fb->color.layout = barrier.newLayout;
|
||||
}
|
||||
}
|
||||
|
||||
// This is supposed to bind a vulkan render pass to the command buffer.
|
||||
PerformBindFramebufferAsRenderTarget(step, cmd, swapChainImage);
|
||||
|
||||
VKRFramebuffer *fb = step.render.framebuffer;
|
||||
|
||||
VkPipeline lastPipeline = VK_NULL_HANDLE;
|
||||
|
||||
auto &commands = step.commands;
|
||||
|
||||
// TODO: Dynamic state commands (SetViewport, SetScissor, SetBlendConstants, SetStencil*) are only
|
||||
// valid when a pipeline is bound with those as dynamic state. So we need to add some state tracking here
|
||||
// for this to be correct. This is a bit of a pain but also will let us eliminate redundant calls.
|
||||
|
||||
for (const auto &c : commands) {
|
||||
switch (c.cmd) {
|
||||
case VKRRenderCommand::BIND_PIPELINE:
|
||||
if (c.pipeline.pipeline != lastPipeline) {
|
||||
vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, c.pipeline.pipeline);
|
||||
lastPipeline = c.pipeline.pipeline;
|
||||
}
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::VIEWPORT:
|
||||
vkCmdSetViewport(cmd, 0, 1, &c.viewport.vp);
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::SCISSOR:
|
||||
vkCmdSetScissor(cmd, 0, 1, &c.scissor.scissor);
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::BLEND:
|
||||
vkCmdSetBlendConstants(cmd, c.blendColor.color);
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::STENCIL:
|
||||
vkCmdSetStencilWriteMask(cmd, VK_STENCIL_FRONT_AND_BACK, c.stencil.stencilWriteMask);
|
||||
vkCmdSetStencilCompareMask(cmd, VK_STENCIL_FRONT_AND_BACK, c.stencil.stencilCompareMask);
|
||||
vkCmdSetStencilReference(cmd, VK_STENCIL_FRONT_AND_BACK, c.stencil.stencilRef);
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::DRAW_INDEXED:
|
||||
vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, c.drawIndexed.pipelineLayout, 0, 1, &c.drawIndexed.ds, c.drawIndexed.numUboOffsets, c.drawIndexed.uboOffsets);
|
||||
vkCmdBindIndexBuffer(cmd, c.drawIndexed.ibuffer, c.drawIndexed.ioffset, VK_INDEX_TYPE_UINT16);
|
||||
vkCmdBindVertexBuffers(cmd, 0, 1, &c.drawIndexed.vbuffer, &c.drawIndexed.voffset);
|
||||
vkCmdDrawIndexed(cmd, c.drawIndexed.count, c.drawIndexed.instances, 0, 0, 0);
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::DRAW:
|
||||
vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, c.draw.pipelineLayout, 0, 1, &c.draw.ds, c.draw.numUboOffsets, c.draw.uboOffsets);
|
||||
vkCmdBindVertexBuffers(cmd, 0, 1, &c.draw.vbuffer, &c.draw.voffset);
|
||||
vkCmdDraw(cmd, c.draw.count, 1, 0, 0);
|
||||
break;
|
||||
|
||||
case VKRRenderCommand::CLEAR:
|
||||
{
|
||||
int numAttachments = 0;
|
||||
VkClearRect rc{};
|
||||
rc.baseArrayLayer = 0;
|
||||
rc.layerCount = 1;
|
||||
rc.rect.extent.width = curWidth_;
|
||||
rc.rect.extent.height = curHeight_;
|
||||
VkClearAttachment attachments[2];
|
||||
if (c.clear.clearMask & VK_IMAGE_ASPECT_COLOR_BIT) {
|
||||
VkClearAttachment &attachment = attachments[numAttachments++];
|
||||
attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
|
||||
attachment.colorAttachment = 0;
|
||||
Uint8x4ToFloat4(attachment.clearValue.color.float32, c.clear.clearColor);
|
||||
}
|
||||
if (c.clear.clearMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
|
||||
VkClearAttachment &attachment = attachments[numAttachments++];
|
||||
attachment.aspectMask = 0;
|
||||
if (c.clear.clearMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
|
||||
attachment.clearValue.depthStencil.depth = c.clear.clearZ;
|
||||
attachment.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
|
||||
}
|
||||
if (c.clear.clearMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
|
||||
attachment.clearValue.depthStencil.stencil = c.clear.clearStencil;
|
||||
attachment.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
|
||||
}
|
||||
}
|
||||
if (numAttachments) {
|
||||
vkCmdClearAttachments(cmd, numAttachments, attachments, 1, &rc);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
ELOG("Unimpl queue command");
|
||||
;
|
||||
}
}
vkCmdEndRenderPass(cmd);

// Transition the framebuffer if requested.
if (fb && step.render.finalColorLayout != VK_IMAGE_LAYOUT_UNDEFINED) {
VkImageMemoryBarrier barrier{};
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.oldLayout = fb->color.layout;
barrier.subresourceRange.layerCount = 1;
barrier.subresourceRange.levelCount = 1;
barrier.image = fb->color.image;
barrier.srcAccessMask = 0;
switch (barrier.oldLayout) {
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
break;
default:
Crash();
}
barrier.newLayout = step.render.finalColorLayout;
switch (barrier.newLayout) {
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
break;
default:
Crash();
}
barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

// we're between passes so it's OK.
// ARM Best Practices guide recommends these stage bits.
vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, nullptr, 0, nullptr, 1, &barrier);
fb->color.layout = barrier.newLayout;
}
}

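// Picks the target framebuffer (FBO or backbuffer), transitions its attachments to the attachment-optimal
// layouts if needed, chooses a render pass matching the requested load actions, and begins the pass.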
void VulkanRenderManager::PerformBindFramebufferAsRenderTarget(const VKRStep &step, VkCommandBuffer cmd, int swapChainImage) {
VkFramebuffer framebuf;
int w;
int h;
VkImageLayout prevLayout;
if (step.render.framebuffer) {
VKRFramebuffer *fb = step.render.framebuffer;
framebuf = fb->framebuf;
w = fb->width;
h = fb->height;
prevLayout = fb->color.layout;
} else {
framebuf = framebuffers_[swapChainImage];
w = vulkan_->GetBackbufferWidth();
h = vulkan_->GetBackbufferHeight();
}

#if 0
// This part is based on faulty old thinking.
if (framebuf == curFramebuffer_) {
if (framebuf == 0)
Crash();

// If we're asking to clear, but already bound, we'll just keep it bound but send a clear command.
// We will try to avoid this as much as possible.
VkClearAttachment clear[2]{};
int count = 0;
if (step.render.color == VKRRenderPassAction::CLEAR) {
clear[count].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
Uint8x4ToFloat4(clear[count].clearValue.color.float32, step.render.clearColor);
clear[count].colorAttachment = 0;
count++;
}

if (step.render.depthStencil == VKRRenderPassAction::CLEAR) {
clear[count].aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
clear[count].clearValue.depthStencil.depth = step.render.clearDepth;
clear[count].clearValue.depthStencil.stencil = step.render.clearStencil;
clear[count].colorAttachment = 0;
count++;
}

if (count > 0) {
VkClearRect rc{ { 0,0,(uint32_t)w,(uint32_t)h }, 0, 1 };
vkCmdClearAttachments(cmd, count, clear, 1, &rc);
}
// We're done.
return;
}
#endif

VkRenderPass renderPass;
int numClearVals = 0;
VkClearValue clearVal[2];
memset(clearVal, 0, sizeof(clearVal));
if (step.render.framebuffer) {
VKRFramebuffer *fb = step.render.framebuffer;
// Now, if the image needs transitioning, let's transition.
// The backbuffer does not, that's handled by VulkanContext.
if (step.render.framebuffer->color.layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
VkAccessFlags srcAccessMask;
VkPipelineStageFlags srcStage;
switch (fb->color.layout) {
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
srcStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
default:
Crash();
break;
}

TransitionImageLayout2(cmd, fb->color.image, VK_IMAGE_ASPECT_COLOR_BIT,
fb->color.layout, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
srcStage, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
srcAccessMask, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT);
fb->color.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
}
if (fb->depth.layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
VkAccessFlags srcAccessMask;
VkPipelineStageFlags srcStage;
switch (fb->depth.layout) {
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
srcStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
srcStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
default:
Crash();
break;
}
TransitionImageLayout2(cmd, fb->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
fb->depth.layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
srcStage, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
srcAccessMask, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT);
fb->depth.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
}

renderPass = renderPasses_[RPIndex(step.render.color, step.render.depthStencil)];
// VLOG("Switching framebuffer to FBO (fc=%d, cmd=%x, rp=%x)", frameNum_, (int)(uintptr_t)cmd_, (int)(uintptr_t)renderPass);
if (step.render.color == VKRRenderPassAction::CLEAR) {
Uint8x4ToFloat4(clearVal[0].color.float32, step.render.clearColor);
numClearVals = 1;
}
if (step.render.depthStencil == VKRRenderPassAction::CLEAR) {
clearVal[1].depthStencil.depth = step.render.clearDepth;
clearVal[1].depthStencil.stencil = step.render.clearStencil;
numClearVals = 2;
}
} else {
renderPass = GetBackbufferRenderpass();
numClearVals = 2; // We don't bother with a depth buffer here.
clearVal[1].depthStencil.depth = 0.0f;
clearVal[1].depthStencil.stencil = 0;
}

VkRenderPassBeginInfo rp_begin = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
rp_begin.renderPass = renderPass;
rp_begin.framebuffer = framebuf;
rp_begin.renderArea.offset.x = 0;
rp_begin.renderArea.offset.y = 0;
rp_begin.renderArea.extent.width = w;
rp_begin.renderArea.extent.height = h;
rp_begin.clearValueCount = numClearVals;
rp_begin.pClearValues = numClearVals ? clearVal : nullptr;
vkCmdBeginRenderPass(cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
curWidth_ = w;
curHeight_ = h;
curFramebuffer_ = framebuf;
}

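// Transitions the source to TRANSFER_SRC_OPTIMAL and the destination to TRANSFER_DST_OPTIMAL as needed,
// then performs a 1:1 vkCmdCopyImage of the requested aspects (color and/or depth+stencil).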
void VulkanRenderManager::PerformCopy(const VKRStep &step, VkCommandBuffer cmd) {
VKRFramebuffer *src = step.copy.src;
VKRFramebuffer *dst = step.copy.dst;

VkImageCopy copy{};
copy.srcOffset.x = step.copy.srcRect.offset.x;
copy.srcOffset.y = step.copy.srcRect.offset.y;
copy.srcOffset.z = 0;
copy.srcSubresource.mipLevel = 0;
copy.srcSubresource.layerCount = 1;
copy.dstOffset.x = step.copy.dstPos.x;
copy.dstOffset.y = step.copy.dstPos.y;
copy.dstOffset.z = 0;
copy.dstSubresource.mipLevel = 0;
copy.dstSubresource.layerCount = 1;
copy.extent.width = step.copy.srcRect.extent.width;
copy.extent.height = step.copy.srcRect.extent.height;
copy.extent.depth = 1;

VkImageMemoryBarrier srcBarriers[2]{};
VkImageMemoryBarrier dstBarriers[2]{};
int srcCount = 0;
int dstCount = 0;

VkPipelineStageFlags srcStage = 0;
VkPipelineStageFlags dstStage = 0;
// First source barriers.
if (step.copy.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
if (src->color.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
SetupTransitionToTransferSrc(src->color, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_COLOR_BIT);
}
if (dst->color.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
SetupTransitionToTransferDst(dst->color, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_COLOR_BIT);
}
}

// We can't copy only depth or only stencil unfortunately.
if (step.copy.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
if (src->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
SetupTransitionToTransferSrc(src->depth, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
}
if (dst->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
SetupTransitionToTransferDst(dst->depth, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
}
}

if (srcCount) {
vkCmdPipelineBarrier(cmd, srcStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, srcCount, srcBarriers);
}
if (dstCount) {
vkCmdPipelineBarrier(cmd, dstStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, dstCount, dstBarriers);
}

if (step.copy.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
copy.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
copy.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
vkCmdCopyImage(cmd, src->color.image, src->color.layout, dst->color.image, dst->color.layout, 1, &copy);
}
if (step.copy.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
copy.srcSubresource.aspectMask = 0;
copy.dstSubresource.aspectMask = 0;
if (step.copy.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
copy.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
copy.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
}
if (step.copy.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
copy.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
copy.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
vkCmdCopyImage(cmd, src->depth.image, src->depth.layout, dst->depth.image, dst->depth.layout, 1, &copy);
}
}

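// Like PerformCopy, but uses vkCmdBlitImage so the source and destination rectangles may differ in size;
// step.blit.filter selects nearest or linear filtering for the scale.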
void VulkanRenderManager::PerformBlit(const VKRStep &step, VkCommandBuffer cmd) {
VkImageMemoryBarrier srcBarriers[2]{};
VkImageMemoryBarrier dstBarriers[2]{};

VKRFramebuffer *src = step.blit.src;
VKRFramebuffer *dst = step.blit.dst;

// If any validation needs to be performed here, it should probably have been done
// already when the blit was queued. So don't validate here.
VkImageBlit blit{};
blit.srcOffsets[0].x = step.blit.srcRect.offset.x;
blit.srcOffsets[0].y = step.blit.srcRect.offset.y;
blit.srcOffsets[0].z = 0;
blit.srcOffsets[1].x = step.blit.srcRect.offset.x + step.blit.srcRect.extent.width;
blit.srcOffsets[1].y = step.blit.srcRect.offset.y + step.blit.srcRect.extent.height;
blit.srcOffsets[1].z = 1;
blit.srcSubresource.mipLevel = 0;
blit.srcSubresource.layerCount = 1;
blit.dstOffsets[0].x = step.blit.dstRect.offset.x;
blit.dstOffsets[0].y = step.blit.dstRect.offset.y;
blit.dstOffsets[0].z = 0;
blit.dstOffsets[1].x = step.blit.dstRect.offset.x + step.blit.dstRect.extent.width;
blit.dstOffsets[1].y = step.blit.dstRect.offset.y + step.blit.dstRect.extent.height;
blit.dstOffsets[1].z = 1;
blit.dstSubresource.mipLevel = 0;
blit.dstSubresource.layerCount = 1;

VkPipelineStageFlags srcStage = 0;
VkPipelineStageFlags dstStage = 0;

int srcCount = 0;
int dstCount = 0;

// First source barriers.
if (step.blit.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
if (src->color.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
SetupTransitionToTransferSrc(src->color, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_COLOR_BIT);
}
if (dst->color.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
SetupTransitionToTransferDst(dst->color, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_COLOR_BIT);
}
}

// We can't copy only depth or only stencil unfortunately.
if (step.blit.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
if (src->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
SetupTransitionToTransferSrc(src->depth, srcBarriers[srcCount++], srcStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
}
if (dst->depth.layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
SetupTransitionToTransferDst(dst->depth, dstBarriers[dstCount++], dstStage, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
}
}

if (srcCount) {
vkCmdPipelineBarrier(cmd, srcStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, srcCount, srcBarriers);
}
if (dstCount) {
vkCmdPipelineBarrier(cmd, dstStage, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, dstCount, dstBarriers);
}

if (step.blit.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
vkCmdBlitImage(cmd, src->color.image, src->color.layout, dst->color.image, dst->color.layout, 1, &blit, step.blit.filter);
}

// TODO: Need to check if the depth format is blittable.
// Actually, we should probably almost always use copies rather than blits for depth buffers.
if (step.blit.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
blit.srcSubresource.aspectMask = 0;
blit.dstSubresource.aspectMask = 0;
if (step.blit.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
blit.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
blit.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
}
if (step.blit.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
blit.srcSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
blit.dstSubresource.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
vkCmdBlitImage(cmd, src->depth.image, src->depth.layout, dst->depth.image, dst->depth.layout, 1, &blit, step.blit.filter);
}
}

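// Fills in an image memory barrier taking img from its current layout to TRANSFER_SRC_OPTIMAL,
// ORs the matching source stage into 'stage', and updates the tracked layout. The caller records
// the barrier with vkCmdPipelineBarrier before the transfer.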
void VulkanRenderManager::SetupTransitionToTransferSrc(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect) {
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.oldLayout = img.layout;
barrier.subresourceRange.layerCount = 1;
barrier.subresourceRange.levelCount = 1;
barrier.image = img.image;
barrier.srcAccessMask = 0;
switch (img.layout) {
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
stage |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
stage |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
stage |= VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
stage |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
break;
default:
Crash();
}
barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
barrier.subresourceRange.aspectMask = aspect;
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
img.layout = barrier.newLayout;
}

void VulkanRenderManager::SetupTransitionToTransferDst(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect) {
barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
barrier.oldLayout = img.layout;
barrier.subresourceRange.layerCount = 1;
barrier.subresourceRange.levelCount = 1;
barrier.image = img.image;
barrier.srcAccessMask = 0;
switch (img.layout) {
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
stage |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
stage |= VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
stage |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
break;
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
stage |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
break;
default:
Crash();
}
barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
barrier.subresourceRange.aspectMask = aspect;
barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
img.layout = barrier.newLayout;
}

@ -8,140 +8,12 @@
#include "Common/Vulkan/VulkanContext.h"
#include "math/dataconv.h"
#include "thin3d/thin3d.h"
#include "thin3d/VulkanQueueRunner.h"

// Takes the role that a GL driver does of sequencing and optimizing render passes.
// Only draws and binds are handled here, resource creation and allocations are handled as normal -
// that's the nice thing with Vulkan.

// The cool thing is that you can Flush on a different thread than you record the commands on!

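// A minimal usage sketch (hypothetical caller code; only BindFramebufferAsRenderTarget below is a real
// declaration in this header, the surrounding flow is illustrative):
//
//   renderManager->BindFramebufferAsRenderTarget(fb, VKRRenderPassAction::CLEAR, VKRRenderPassAction::CLEAR, 0xFF000000, 1.0f, 0);
//   // ...queue draw commands from the emulation thread...
//   // ...while the render thread plays the recorded steps back into a VkCommandBuffer.
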
enum class VKRRenderCommand : uint8_t {
BIND_PIPELINE,
STENCIL,
BLEND,
VIEWPORT,
SCISSOR,
CLEAR,
DRAW,
DRAW_INDEXED,
};

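// One queued command. 'cmd' selects which union member is valid; the union keeps the per-command
// payload small so a step can record many commands cheaply.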
struct VkRenderData {
VKRRenderCommand cmd;
union {
struct {
VkPipeline pipeline;
} pipeline;
struct {
VkPipelineLayout pipelineLayout;
VkDescriptorSet ds;
int numUboOffsets;
uint32_t uboOffsets[3];
VkBuffer vbuffer;
VkDeviceSize voffset;
uint32_t count;
} draw;
struct {
VkPipelineLayout pipelineLayout;
VkDescriptorSet ds;
int numUboOffsets;
uint32_t uboOffsets[3];
VkBuffer vbuffer; // might need to increase at some point
VkDeviceSize voffset;
VkBuffer ibuffer;
VkDeviceSize ioffset;
uint32_t count;
int16_t instances;
VkIndexType indexType;
} drawIndexed;
struct {
uint32_t clearColor;
float clearZ;
int clearStencil;
int clearMask; // VK_IMAGE_ASPECT_COLOR_BIT etc
} clear;
struct {
VkViewport vp;
} viewport;
struct {
VkRect2D scissor;
} scissor;
struct {
uint8_t stencilWriteMask;
uint8_t stencilCompareMask;
uint8_t stencilRef;
} stencil;
struct {
float color[4];
} blendColor;
struct {
} beginRp;
struct {
} endRp;
};
};

enum class VKRStepType : uint8_t {
RENDER,
COPY,
BLIT,
READBACK,
};

class VKRFramebuffer;

enum class VKRRenderPassAction {
DONT_CARE,
CLEAR,
KEEP,
};

struct TransitionRequest {
VKRFramebuffer *fb;
VkImageLayout targetLayout;
};

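// A step is one self-contained unit of GPU work: a render pass with its recorded commands,
// or a copy/blit/readback between framebuffers, plus any layout transitions required before it.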
struct VKRStep {
VKRStep(VKRStepType _type) : stepType(_type) {}
VKRStepType stepType;
std::vector<VkRenderData> commands;
std::vector<TransitionRequest> preTransitions;
union {
struct {
VKRFramebuffer *framebuffer;
VKRRenderPassAction color;
VKRRenderPassAction depthStencil;
uint32_t clearColor;
float clearDepth;
int clearStencil;
int numDraws;
VkImageLayout finalColorLayout;
} render;
struct {
VKRFramebuffer *src;
VKRFramebuffer *dst;
VkRect2D srcRect;
VkOffset2D dstPos;
int aspectMask;
} copy;
struct {
VKRFramebuffer *src;
VKRFramebuffer *dst;
VkRect2D srcRect;
VkRect2D dstRect;
int aspectMask;
VkFilter filter;
} blit;
struct {
VKRFramebuffer *src;
void *destPtr;
VkRect2D srcRect;
} readback;
};
};

// Simple independent framebuffer image. Gets its own allocation, we don't have that many framebuffers so it's fine
// to let them have individual non-pooled allocations. Until it's not fine. We'll see.
struct VKRImage {
@ -211,6 +83,7 @@ public:
void Run(int frame);
// Bad for performance but sometimes necessary for synchronous CPU readbacks (screenshots and whatnot).
void Sync();
void RunSteps(VkCommandBuffer cmd, const std::vector<VKRStep *> &steps, int curSwapChainImage);

void BindFramebufferAsRenderTarget(VKRFramebuffer *fb, VKRRenderPassAction color, VKRRenderPassAction depth, uint32_t clearColor, float clearDepth, uint8_t clearStencil);
VkImageView BindFramebufferAsTexture(VKRFramebuffer *fb, int binding, int aspectBit, int attachment);
@ -292,17 +165,18 @@ public:
}

VkCommandBuffer GetInitCmd();
VkRenderPass GetBackbufferRenderpass() const {
return backbufferRenderPass_;

VkRenderPass GetRenderPass(int pass) const {
return queueRunner_.GetRenderPass(pass);
}
VkRenderPass GetRenderPass(int i) const {
return renderPasses_[i];
VkRenderPass GetBackbufferRenderPass() const {
return queueRunner_.GetBackbufferRenderPass();
}
VkRenderPass GetCompatibleRenderpass() const {
VkRenderPass GetCompatibleRenderPass() const {
if (curRenderStep_ && curRenderStep_->render.framebuffer != nullptr) {
return GetRenderPass(0);
return queueRunner_.GetRenderPass(0);
} else {
return backbufferRenderPass_;
return queueRunner_.GetBackbufferRenderPass();
}
}

@ -311,30 +185,10 @@ public:

private:
void InitBackbufferFramebuffers(int width, int height);
void InitBackbufferRenderPass();
void InitRenderpasses();
void InitDepthStencilBuffer(VkCommandBuffer cmd); // Used for non-buffered rendering.

void PerformBindFramebufferAsRenderTarget(const VKRStep &pass, VkCommandBuffer cmd, int swapChainImage);

void PerformRenderPass(const VKRStep &pass, VkCommandBuffer cmd, int swapChainImage);
void PerformCopy(const VKRStep &pass, VkCommandBuffer cmd);
void PerformBlit(const VKRStep &pass, VkCommandBuffer cmd);

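// Maps the (color, depthStencil) pair of load actions onto the 3x3 renderPasses_ table below
// (DONT_CARE / CLEAR / KEEP for each attachment), e.g. CLEAR color + KEEP depth -> index 2*3+1.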
inline int RPIndex(VKRRenderPassAction color, VKRRenderPassAction depth) {
return (int)depth * 3 + (int)color;
}

static void SetupTransitionToTransferSrc(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect);
static void SetupTransitionToTransferDst(VKRImage &img, VkImageMemoryBarrier &barrier, VkPipelineStageFlags &stage, VkImageAspectFlags aspect);

// Permanent objects
VkSemaphore acquireSemaphore_;
VkSemaphore renderingCompleteSemaphore_;
VkRenderPass backbufferRenderPass_ = VK_NULL_HANDLE;
// Renderpasses, all combinations of preserving or clearing or dont-care-ing fb contents.
// TODO: Create these on demand.
VkRenderPass renderPasses_[9];

// Per-frame data, round-robin so we can overlap submission with execution of the previous frame.
struct FrameData {
@ -370,7 +224,7 @@ private:
VulkanContext *vulkan_;
std::thread thread_;
std::mutex mutex_;
VkFramebuffer curFramebuffer_ = VK_NULL_HANDLE;
VulkanQueueRunner queueRunner_;

// Swap chain management
struct SwapchainImageData {

@ -448,9 +448,9 @@ public:
// Return a representative renderpass.
return (uintptr_t)renderManager_.GetRenderPass(0);
case NativeObject::BACKBUFFER_RENDERPASS:
return (uintptr_t)renderManager_.GetBackbufferRenderpass();
return (uintptr_t)renderManager_.GetBackbufferRenderPass();
case NativeObject::COMPATIBLE_RENDERPASS:
return (uintptr_t)renderManager_.GetCompatibleRenderpass();
return (uintptr_t)renderManager_.GetCompatibleRenderPass();
case NativeObject::INIT_COMMANDBUFFER:
return (uintptr_t)renderManager_.GetInitCmd();
case NativeObject::BOUND_TEXTURE_IMAGEVIEW:
@ -893,7 +893,7 @@ Pipeline *VKContext::CreateGraphicsPipeline(const PipelineDesc &desc) {
info.pViewportState = &vs; // Must set viewport and scissor counts even if we set the actual state dynamically.
info.layout = pipelineLayout_;
info.subpass = 0;
info.renderPass = renderManager_.GetBackbufferRenderpass();
info.renderPass = renderManager_.GetBackbufferRenderPass();

// OK, need to create a new pipeline.
VkResult result = vkCreateGraphicsPipelines(device_, pipelineCache_, 1, &info, nullptr, &pipeline->vkpipeline);
Loading…
x
Reference in New Issue
Block a user