Merge pull request #11766 from hrydgard/vulkan-validation-more

Vulkan: Improve extension loading, implement dedicated_allocation correctly
Henrik Rydgård, 2019-02-05 23:47:34 +01:00 (committed by GitHub)
commit 210e84869f
27 changed files with 461 additions and 188 deletions

View File

@ -60,7 +60,7 @@ static const char *validationLayers[] = {
std::string VulkanVendorString(uint32_t vendorId) {
switch (vendorId) {
case VULKAN_VENDOR_INTEL: return "Intel";
case VULKAN_VENDOR_NVIDIA: return "nVidia";
case VULKAN_VENDOR_NVIDIA: return "NVIDIA";
case VULKAN_VENDOR_AMD: return "AMD";
case VULKAN_VENDOR_ARM: return "ARM";
case VULKAN_VENDOR_QUALCOMM: return "Qualcomm";
@ -77,6 +77,8 @@ const char *PresentModeString(VkPresentModeKHR presentMode) {
case VK_PRESENT_MODE_MAILBOX_KHR: return "MAILBOX";
case VK_PRESENT_MODE_FIFO_KHR: return "FIFO";
case VK_PRESENT_MODE_FIFO_RELAXED_KHR: return "FIFO_RELAXED";
case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR: return "SHARED_DEMAND_REFRESH_KHR";
case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: return "SHARED_CONTINUOUS_REFRESH_KHR";
default: return "UNKNOWN";
}
}
@ -120,9 +122,6 @@ VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
//#if defined(VK_USE_PLATFORM_XCB_KHR)
// instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
//#if defined(VK_USE_PLATFORM_MIR_KHR)
// instance_extensions_enabled_.push_back(VK_KHR_MIR_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
@ -131,18 +130,32 @@ VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
#endif
if (flags_ & VULKAN_FLAG_VALIDATE) {
if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) {
if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
// Enable the validation layers
for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
instance_layer_names_.push_back(validationLayers[i]);
device_layer_names_.push_back(validationLayers[i]);
}
instance_extensions_enabled_.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
extensionsLookup_.EXT_debug_utils = true;
} else if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) {
for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
instance_layer_names_.push_back(validationLayers[i]);
device_layer_names_.push_back(validationLayers[i]);
}
instance_extensions_enabled_.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
extensionsLookup_.EXT_debug_report = true;
} else {
ELOG("Validation layer extension not available - not enabling Vulkan validation.");
flags_ &= ~VULKAN_FLAG_VALIDATE;
}
}
if (IsInstanceExtensionAvailable(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
instance_extensions_enabled_.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
extensionsLookup_.KHR_get_physical_device_properties2 = true;
}
// Validate that all the instance extensions we ask for are actually available.
for (auto ext : instance_extensions_enabled_) {
if (!IsInstanceExtensionAvailable(ext))
@ -190,7 +203,7 @@ VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
return res;
}
VulkanLoadInstanceFunctions(instance_);
VulkanLoadInstanceFunctions(instance_, extensionsLookup_);
if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
WLOG("CheckLayers for instance failed");
// init_error_ = "Failed to validate instance layers";
@ -222,8 +235,25 @@ VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
return res;
}
for (uint32_t i = 0; i < gpu_count; i++) {
vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i]);
if (extensionsLookup_.KHR_get_physical_device_properties2) {
for (uint32_t i = 0; i < gpu_count; i++) {
VkPhysicalDeviceProperties2 props2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR};
VkPhysicalDeviceExternalMemoryHostPropertiesEXT extHostMemProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT};
props2.pNext = &pushProps;
pushProps.pNext = &extHostMemProps;
vkGetPhysicalDeviceProperties2KHR(physical_devices_[i], &props2);
// Don't want bad pointers sitting around.
props2.pNext = nullptr;
pushProps.pNext = nullptr;
physicalDeviceProperties_[i].properties = props2.properties;
physicalDeviceProperties_[i].pushDescriptorProperties = pushProps;
physicalDeviceProperties_[i].externalMemoryHostProperties = extHostMemProps;
}
} else {
for (uint32_t i = 0; i < gpu_count; i++) {
vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i].properties);
}
}
return VK_SUCCESS;
}
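
Note: the properties2 hunk above relies on Vulkan's pNext chaining. A minimal sketch of the same query pattern, for orientation only (illustrative code, not part of this PR):

// Sketch: query core properties plus an extension property struct in one call
// through the VK_KHR_get_physical_device_properties2 pNext chain. Every struct
// in the chain carries its sType; the driver fills the structs it recognizes.
static uint32_t QueryMaxPushDescriptors(VkPhysicalDevice physicalDevice) {
	VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR};
	VkPhysicalDeviceProperties2 props2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
	props2.pNext = &pushProps;
	vkGetPhysicalDeviceProperties2KHR(physicalDevice, &props2);
	// props2.properties holds what vkGetPhysicalDeviceProperties would return;
	// pushProps.maxPushDescriptors holds the extension-specific limit.
	return pushProps.maxPushDescriptors;
}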
@ -423,7 +453,7 @@ bool VulkanContext::CheckLayers(const std::vector<LayerProperties> &layer_props,
int VulkanContext::GetPhysicalDeviceByName(std::string name) {
for (size_t i = 0; i < physical_devices_.size(); i++) {
if (physicalDeviceProperties_[i].deviceName == name)
if (physicalDeviceProperties_[i].properties.deviceName == name)
return (int)i;
}
return -1;
@ -520,37 +550,41 @@ void VulkanContext::ChooseDevice(int physical_device) {
}
// Optional features
vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &featuresAvailable_);
memset(&featuresEnabled_, 0, sizeof(featuresEnabled_));
if (extensionsLookup_.KHR_get_physical_device_properties2) {
VkPhysicalDeviceFeatures2 features2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR};
vkGetPhysicalDeviceFeatures2KHR(physical_devices_[physical_device_], &features2);
deviceFeatures_.available = features2.features;
} else {
vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &deviceFeatures_.available);
}
deviceFeatures_.enabled = {};
// Enable a few safe ones if they are available.
if (featuresAvailable_.dualSrcBlend) {
featuresEnabled_.dualSrcBlend = true;
if (deviceFeatures_.available.dualSrcBlend) {
deviceFeatures_.enabled.dualSrcBlend = true;
}
if (featuresAvailable_.largePoints) {
featuresEnabled_.largePoints = true;
if (deviceFeatures_.available.largePoints) {
deviceFeatures_.enabled.largePoints = true;
}
if (featuresAvailable_.wideLines) {
featuresEnabled_.wideLines = true;
if (deviceFeatures_.available.wideLines) {
deviceFeatures_.enabled.wideLines = true;
}
if (featuresAvailable_.geometryShader) {
featuresEnabled_.geometryShader = true;
if (deviceFeatures_.available.logicOp) {
deviceFeatures_.enabled.logicOp = true;
}
if (featuresAvailable_.logicOp) {
featuresEnabled_.logicOp = true;
if (deviceFeatures_.available.depthClamp) {
deviceFeatures_.enabled.depthClamp = true;
}
if (featuresAvailable_.depthClamp) {
featuresEnabled_.depthClamp = true;
if (deviceFeatures_.available.depthBounds) {
deviceFeatures_.enabled.depthBounds = true;
}
if (featuresAvailable_.depthBounds) {
featuresEnabled_.depthBounds = true;
}
if (featuresAvailable_.samplerAnisotropy) {
featuresEnabled_.samplerAnisotropy = true;
if (deviceFeatures_.available.samplerAnisotropy) {
deviceFeatures_.enabled.samplerAnisotropy = true;
}
// For easy wireframe mode, someday.
if (featuresEnabled_.fillModeNonSolid) {
featuresEnabled_.fillModeNonSolid = true;
if (deviceFeatures_.available.fillModeNonSolid) {
deviceFeatures_.enabled.fillModeNonSolid = true;
}
GetDeviceLayerExtensionList(nullptr, device_extension_properties_);
@ -574,8 +608,8 @@ VkResult VulkanContext::CreateDevice() {
return VK_ERROR_INITIALIZATION_FAILED;
}
VkDeviceQueueCreateInfo queue_info{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
float queue_priorities[1] = { 1.0f };
VkDeviceQueueCreateInfo queue_info{VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
float queue_priorities[1] = {1.0f};
queue_info.queueCount = 1;
queue_info.pQueuePriorities = queue_priorities;
bool found = false;
@ -588,11 +622,25 @@ VkResult VulkanContext::CreateDevice() {
}
assert(found);
extensionsLookup_.KHR_maintenance1 = EnableDeviceExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
extensionsLookup_.KHR_maintenance2 = EnableDeviceExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
extensionsLookup_.KHR_maintenance3 = EnableDeviceExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
extensionsLookup_.KHR_multiview = EnableDeviceExtension(VK_KHR_MULTIVIEW_EXTENSION_NAME);
if (EnableDeviceExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
deviceExtensionsLookup_.KHR_get_memory_requirements2 = true;
deviceExtensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
extensionsLookup_.KHR_get_memory_requirements2 = true;
extensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
}
deviceExtensionsLookup_.EXT_external_memory_host = EnableDeviceExtension(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
if (EnableDeviceExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME)) {
if (EnableDeviceExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
extensionsLookup_.EXT_external_memory_host = EnableDeviceExtension(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
}
}
if (EnableDeviceExtension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
extensionsLookup_.KHR_create_renderpass2 = true;
extensionsLookup_.KHR_depth_stencil_resolve = EnableDeviceExtension(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
}
extensionsLookup_.EXT_shader_stencil_export = EnableDeviceExtension(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
device_info.queueCreateInfoCount = 1;
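
EnableDeviceExtension() itself isn't shown in this diff; judging from how it's used above and the members declared in VulkanContext.h, a plausible implementation looks roughly like this (an assumption, not the actual code):

// Hypothetical sketch of EnableDeviceExtension, inferred from its usage:
// enable and record the extension only if the device advertises it.
bool VulkanContext::EnableDeviceExtension(const char *extension) {
	for (const auto &iter : device_extension_properties_) {
		if (!strcmp(iter.extensionName, extension)) {
			device_extensions_enabled_.push_back(extension);
			return true;
		}
	}
	return false;
}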
@ -601,13 +649,14 @@ VkResult VulkanContext::CreateDevice() {
device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
device_info.pEnabledFeatures = &featuresEnabled_;
device_info.pEnabledFeatures = &deviceFeatures_.enabled;
VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
if (res != VK_SUCCESS) {
init_error_ = "Unable to create Vulkan device";
ELOG("Unable to create Vulkan device");
} else {
VulkanLoadDeviceFunctions(device_);
VulkanLoadDeviceFunctions(device_, extensionsLookup_);
}
ILOG("Device created.\n");
VulkanSetAvailable(true);
@ -623,13 +672,11 @@ VkResult VulkanContext::InitDebugMsgCallback(PFN_vkDebugReportCallbackEXT dbgFun
}
ILOG("Registering debug report callback");
VkDebugReportCallbackCreateInfoEXT cb = {};
cb.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
cb.pNext = nullptr;
VkDebugReportCallbackCreateInfoEXT cb{VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT};
cb.flags = bits;
cb.pfnCallback = dbgFunc;
cb.pUserData = userdata;
VkResult res = dyn_vkCreateDebugReportCallbackEXT(instance_, &cb, nullptr, &msg_callback);
VkResult res = vkCreateDebugReportCallbackEXT(instance_, &cb, nullptr, &msg_callback);
switch (res) {
case VK_SUCCESS:
msg_callbacks.push_back(msg_callback);
@ -643,12 +690,42 @@ VkResult VulkanContext::InitDebugMsgCallback(PFN_vkDebugReportCallbackEXT dbgFun
}
void VulkanContext::DestroyDebugMsgCallback() {
if (!extensionsLookup_.EXT_debug_report)
return;
while (msg_callbacks.size() > 0) {
dyn_vkDestroyDebugReportCallbackEXT(instance_, msg_callbacks.back(), nullptr);
vkDestroyDebugReportCallbackEXT(instance_, msg_callbacks.back(), nullptr);
msg_callbacks.pop_back();
}
}
VkResult VulkanContext::InitDebugUtilsCallback(PFN_vkDebugUtilsMessengerCallbackEXT callback, int bits, void *userdata) {
VkDebugUtilsMessengerCreateInfoEXT callback1{VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT};
callback1.messageSeverity = bits;
callback1.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
callback1.pfnUserCallback = callback;
callback1.pUserData = userdata;
VkDebugUtilsMessengerEXT messenger;
VkResult res = vkCreateDebugUtilsMessengerEXT(instance_, &callback1, nullptr, &messenger);
if (res != VK_SUCCESS) {
ELOG("Failed to register debug callback with vkCreateDebugUtilsMessengerEXT");
// Do error handling for VK_ERROR_OUT_OF_MEMORY
} else {
ILOG("Debug callback registered with vkCreateDebugUtilsMessengerEXT.");
utils_callbacks.push_back(messenger);
}
return res;
}
void VulkanContext::DestroyDebugUtilsCallback() {
if (!extensionsLookup_.EXT_debug_utils)
return;
while (utils_callbacks.size() > 0) {
vkDestroyDebugUtilsMessengerEXT(instance_, utils_callbacks.back(), nullptr);
utils_callbacks.pop_back();
}
}
VkResult VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2) {
winsys_ = winsys;
winsysData1_ = data1;
@ -688,7 +765,7 @@ VkResult VulkanContext::ReinitSurface() {
#if defined(VK_USE_PLATFORM_XLIB_KHR)
case WINDOWSYSTEM_XLIB:
{
VkXlibSurfaceCreateInfoKHR xlib = { VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
VkXlibSurfaceCreateInfoKHR xlib{ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
xlib.flags = 0;
xlib.dpy = (Display *)winsysData1_;
xlib.window = (Window)winsysData2_;
@ -698,7 +775,7 @@ VkResult VulkanContext::ReinitSurface() {
#if defined(VK_USE_PLATFORM_XCB_KHR)
case WINDOWSYSTEM_XCB:
{
VkXCBSurfaceCreateInfoKHR xcb = { VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
VkXCBSurfaceCreateInfoKHR xcb{ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
xcb.flags = 0;
xcb.connection = (Connection *)winsysData1_;
xcb.window = (Window)(uintptr_t)winsysData2_;
@ -708,7 +785,7 @@ VkResult VulkanContext::ReinitSurface() {
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
case WINDOWSYSTEM_WAYLAND:
{
VkWaylandSurfaceCreateInfoKHR wayland = { VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
VkWaylandSurfaceCreateInfoKHR wayland{ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
wayland.flags = 0;
wayland.display = (wl_display *)winsysData1_;
wayland.surface = (wl_surface *)winsysData2_;
@ -836,7 +913,7 @@ bool VulkanContext::InitSwapchain() {
swapChainExtent_.width = clamp(surfCapabilities_.currentExtent.width, surfCapabilities_.minImageExtent.width, surfCapabilities_.maxImageExtent.width);
swapChainExtent_.height = clamp(surfCapabilities_.currentExtent.height, surfCapabilities_.minImageExtent.height, surfCapabilities_.maxImageExtent.height);
if (physicalDeviceProperties_[physical_device_].vendorID == VULKAN_VENDOR_IMGTEC) {
if (physicalDeviceProperties_[physical_device_].properties.vendorID == VULKAN_VENDOR_IMGTEC) {
// Swap chain width hack to avoid issue #11743 (PowerVR driver bug).
swapChainExtent_.width &= ~31;
}
@ -892,7 +969,7 @@ bool VulkanContext::InitSwapchain() {
preTransform = surfCapabilities_.currentTransform;
}
VkSwapchainCreateInfoKHR swap_chain_info = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
VkSwapchainCreateInfoKHR swap_chain_info{ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
swap_chain_info.surface = surface_;
swap_chain_info.minImageCount = desiredNumberOfSwapChainImages;
swap_chain_info.imageFormat = swapchainFormat_;

View File

@ -101,13 +101,6 @@ private:
std::vector<Callback> callbacks_;
};
// For fast extension-enabled checks.
struct VulkanDeviceExtensions {
bool KHR_get_memory_requirements2;
bool KHR_dedicated_allocation;
bool EXT_external_memory_host;
};
// Useful for debugging on ARM Mali. This eliminates transaction elimination
// which can cause artifacts if you get barriers wrong (or if there are driver bugs).
// Cost is reduced performance on some GPU architectures.
@ -171,7 +164,11 @@ public:
bool MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex);
VkResult InitDebugMsgCallback(PFN_vkDebugReportCallbackEXT dbgFunc, int bits, void *userdata = nullptr);
VkResult InitDebugUtilsCallback(PFN_vkDebugUtilsMessengerCallbackEXT callback, int bits, void *userdata);
void DestroyDebugUtilsCallback();
// Legacy reporting
VkResult InitDebugMsgCallback(PFN_vkDebugReportCallbackEXT dbgFunc, int bits, void *userdata);
void DestroyDebugMsgCallback();
VkPhysicalDevice GetPhysicalDevice(int n = 0) const {
@ -192,7 +189,15 @@ public:
return graphics_queue_family_index_;
}
const VkPhysicalDeviceProperties &GetPhysicalDeviceProperties(int i) const {
struct PhysicalDeviceProps {
VkPhysicalDeviceProperties properties;
VkPhysicalDevicePushDescriptorPropertiesKHR pushDescriptorProperties;
VkPhysicalDeviceExternalMemoryHostPropertiesEXT externalMemoryHostProperties;
};
const PhysicalDeviceProps &GetPhysicalDeviceProperties(int i = -1) const {
if (i < 0)
i = GetCurrentPhysicalDevice();
return physicalDeviceProperties_[i];
}
@ -208,8 +213,13 @@ public:
const std::vector<const char *> &GetDeviceExtensionsEnabled() const {
return device_extensions_enabled_;
}
const VkPhysicalDeviceFeatures &GetFeaturesAvailable() const { return featuresAvailable_; }
const VkPhysicalDeviceFeatures &GetFeaturesEnabled() const { return featuresEnabled_; }
struct PhysicalDeviceFeatures {
VkPhysicalDeviceFeatures available{};
VkPhysicalDeviceFeatures enabled{};
};
const PhysicalDeviceFeatures &GetDeviceFeatures() const { return deviceFeatures_; }
const VulkanPhysicalDeviceInfo &GetDeviceInfo() const { return deviceInfo_; }
const VkSurfaceCapabilitiesKHR &GetSurfaceCapabilities() const { return surfCapabilities_; }
@ -250,7 +260,7 @@ public:
MAX_INFLIGHT_FRAMES = 3,
};
const VulkanDeviceExtensions &DeviceExtensions() { return deviceExtensionsLookup_; }
const VulkanDeviceExtensions &DeviceExtensions() { return extensionsLookup_; }
private:
// A layer can expose extensions, keep track of those extensions here.
@ -284,14 +294,14 @@ private:
std::vector<const char *> device_extensions_enabled_;
std::vector<VkExtensionProperties> device_extension_properties_;
VulkanDeviceExtensions deviceExtensionsLookup_{};
VulkanDeviceExtensions extensionsLookup_{};
std::vector<VkPhysicalDevice> physical_devices_;
int physical_device_ = -1;
uint32_t graphics_queue_family_index_ = -1;
std::vector<VkPhysicalDeviceProperties> physicalDeviceProperties_{};
std::vector<PhysicalDeviceProps> physicalDeviceProperties_{};
std::vector<VkQueueFamilyProperties> queue_props;
VkPhysicalDeviceMemoryProperties memory_properties{};
@ -317,14 +327,14 @@ private:
VulkanDeleteList globalDeleteList_;
std::vector<VkDebugReportCallbackEXT> msg_callbacks;
std::vector<VkDebugUtilsMessengerEXT> utils_callbacks;
VkSwapchainKHR swapchain_ = VK_NULL_HANDLE;
VkFormat swapchainFormat_;
uint32_t queue_count = 0;
VkPhysicalDeviceFeatures featuresAvailable_{};
VkPhysicalDeviceFeatures featuresEnabled_{};
PhysicalDeviceFeatures deviceFeatures_;
VkSurfaceCapabilitiesKHR surfCapabilities_{};

View File

@ -56,7 +56,7 @@ const char *ObjTypeToString(VkDebugReportObjectTypeEXT type) {
}
}
VkBool32 VKAPI_CALL Vulkan_Dbg(VkDebugReportFlagsEXT msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void *pUserData) {
VkBool32 VKAPI_CALL VulkanDebugReportCallback(VkDebugReportFlagsEXT msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void *pUserData) {
const VulkanLogOptions *options = (const VulkanLogOptions *)pUserData;
std::ostringstream message;
@ -73,8 +73,6 @@ VkBool32 VKAPI_CALL Vulkan_Dbg(VkDebugReportFlagsEXT msgFlags, VkDebugReportObje
}
message << "[" << pLayerPrefix << "] " << ObjTypeToString(objType) << " Code " << msgCode << " : " << pMsg << "\n";
if (msgCode == 2) // Useless perf warning ("Vertex attribute at location X not consumed by vertex shader")
return false;
if (msgCode == 64) // Another useless perf warning that will be seen less and less as we optimize - vkCmdClearAttachments() issued on command buffer object 0x00000195296C6D40 prior to any Draw Cmds. It is recommended you use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.
return false;
if (msgCode == 5)
@ -105,3 +103,60 @@ VkBool32 VKAPI_CALL Vulkan_Dbg(VkDebugReportFlagsEXT msgFlags, VkDebugReportObje
// keep that behavior here.
return false;
}
VkBool32 VKAPI_CALL VulkanDebugUtilsCallback(
VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VkDebugUtilsMessageTypeFlagsEXT messageType,
const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
void* pUserData) {
const VulkanLogOptions *options = (const VulkanLogOptions *)pUserData;
std::ostringstream message;
const char *pMessage = pCallbackData->pMessage;
int messageCode = pCallbackData->messageIdNumber;
const char *pLayerPrefix = "";
// Ignore perf warnings for now. Could log them, so still want them registered.
if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
message << "ERROR(";
} else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
message << "WARNING(";
} else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT) {
message << "INFO(";
} else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) {
message << "VERBOSE(";
}
if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) {
message << "perf";
} else if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT) {
message << "general";
} else if (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT) {
message << "validation";
}
message << ":" << pCallbackData->messageIdNumber << ") " << pMessage << "\n";
#ifdef _WIN32
std::string msg = message.str();
OutputDebugStringA(msg.c_str());
if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) {
if (options->breakOnError && IsDebuggerPresent()) {
DebugBreak();
}
if (options->msgBoxOnError) {
MessageBoxA(NULL, pMessage, "Alert", MB_OK);
}
} else if (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) {
if (options->breakOnWarning && IsDebuggerPresent() && 0 == (messageType & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT)) {
DebugBreak();
}
}
#endif
// false indicates that layer should not bail-out of an
// API call that had validation failures. This may mean that the
// app dies inside the driver due to invalid parameter(s).
// That's what would happen without validation layers, so we'll
// keep that behavior here.
return false;
}

View File

@ -25,4 +25,5 @@ struct VulkanLogOptions {
bool msgBoxOnError;
};
VkBool32 VKAPI_CALL Vulkan_Dbg(VkDebugReportFlagsEXT msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void *pUserData);
VkBool32 VKAPI_CALL VulkanDebugReportCallback(VkDebugReportFlagsEXT msgFlags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void *pUserData);
VkBool32 VKAPI_CALL VulkanDebugUtilsCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *pUserData);

View File

@ -72,9 +72,27 @@ bool VulkanTexture::CreateDirect(VkCommandBuffer cmd, int w, int h, int numMips,
}
VkMemoryRequirements mem_reqs{};
vkGetImageMemoryRequirements(vulkan_->GetDevice(), image_, &mem_reqs);
if (allocator_) {
bool dedicatedAllocation = false;
if (vulkan_->DeviceExtensions().KHR_dedicated_allocation) {
VkImageMemoryRequirementsInfo2KHR memReqInfo2{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
memReqInfo2.image = image_;
VkMemoryRequirements2KHR memReq2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
VkMemoryDedicatedRequirementsKHR memDedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
memReq2.pNext = &memDedicatedReq;
vkGetImageMemoryRequirements2KHR(vulkan_->GetDevice(), &memReqInfo2, &memReq2);
mem_reqs = memReq2.memoryRequirements;
dedicatedAllocation =
(memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
(memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
} else {
vkGetImageMemoryRequirements(vulkan_->GetDevice(), image_, &mem_reqs);
}
if (allocator_ && !dedicatedAllocation) {
offset_ = allocator_->Allocate(mem_reqs, &mem_, Tag());
if (offset_ == VulkanDeviceAllocator::ALLOCATE_FAILED) {
ELOG("Image memory allocation failed (mem_reqs.size=%d, typebits=%08x", (int)mem_reqs.size, (int)mem_reqs.memoryTypeBits);
@ -86,6 +104,12 @@ bool VulkanTexture::CreateDirect(VkCommandBuffer cmd, int w, int h, int numMips,
mem_alloc.memoryTypeIndex = 0;
mem_alloc.allocationSize = mem_reqs.size;
VkMemoryDedicatedAllocateInfoKHR dedicatedAllocateInfo{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR};
if (dedicatedAllocation) {
dedicatedAllocateInfo.image = image_;
mem_alloc.pNext = &dedicatedAllocateInfo;
}
// Find memory type - don't specify any mapping requirements
bool pass = vulkan_->MemoryTypeFromProperties(mem_reqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, &mem_alloc.memoryTypeIndex);
_assert_(pass);
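
The dedicated-allocation pattern above (query VkMemoryDedicatedRequirementsKHR, then chain VkMemoryDedicatedAllocateInfoKHR into the allocation) repeats in several files of this PR. A condensed, self-contained sketch of the whole flow for an image, assuming VK_KHR_get_memory_requirements2 and VK_KHR_dedicated_allocation were enabled at device creation (illustrative helper, not part of the codebase):

// Sketch: allocate image memory, honoring VK_KHR_dedicated_allocation when the
// driver requires or prefers it. Fall back to vkGetImageMemoryRequirements when
// the extensions are unavailable (as the code above does).
static VkDeviceMemory AllocateImageMemory(VkDevice device, VkImage image,
		const VkPhysicalDeviceMemoryProperties &memProps) {
	VkMemoryDedicatedRequirementsKHR dedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
	VkMemoryRequirements2KHR reqs2{VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
	reqs2.pNext = &dedicatedReq;
	VkImageMemoryRequirementsInfo2KHR info{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
	info.image = image;
	vkGetImageMemoryRequirements2KHR(device, &info, &reqs2);

	VkMemoryAllocateInfo alloc{VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
	alloc.allocationSize = reqs2.memoryRequirements.size;
	// Pick the first DEVICE_LOCAL memory type allowed by memoryTypeBits.
	for (uint32_t i = 0; i < memProps.memoryTypeCount; i++) {
		if ((reqs2.memoryRequirements.memoryTypeBits & (1u << i)) &&
			(memProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)) {
			alloc.memoryTypeIndex = i;
			break;
		}
	}

	// If the driver requires or prefers it, tie the allocation to this image.
	VkMemoryDedicatedAllocateInfoKHR dedicatedInfo{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR};
	if (dedicatedReq.requiresDedicatedAllocation || dedicatedReq.prefersDedicatedAllocation) {
		dedicatedInfo.image = image;
		alloc.pNext = &dedicatedInfo;
	}

	VkDeviceMemory memory = VK_NULL_HANDLE;
	vkAllocateMemory(device, &alloc, nullptr, &memory);
	return memory;
}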

View File

@ -193,12 +193,22 @@ PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR;
PFN_vkQueuePresentKHR vkQueuePresentKHR;
// And the DEBUG_REPORT extension. We dynamically load this.
PFN_vkCreateDebugReportCallbackEXT dyn_vkCreateDebugReportCallbackEXT;
PFN_vkDestroyDebugReportCallbackEXT dyn_vkDestroyDebugReportCallbackEXT;
PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT;
PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT;
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT;
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
#ifdef _WIN32
static HINSTANCE vulkanLibrary;
@ -260,19 +270,22 @@ bool VulkanMayBeAvailable() {
VkApplicationInfo info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
std::vector<VkPhysicalDevice> devices;
bool anyGood = false;
const char *instanceExtensions[1]{};
const char *instanceExtensions[2]{};
VkInstance instance = VK_NULL_HANDLE;
VkResult res;
uint32_t physicalDeviceCount = 0;
uint32_t instanceExtCount = 0;
uint32_t instanceExtCount = 1;
bool surfaceExtensionFound = false;
bool platformSurfaceExtensionFound = false;
std::vector<VkExtensionProperties> instanceExts;
instanceExtensions[ci.enabledExtensionCount++] = VK_KHR_SURFACE_EXTENSION_NAME;
#ifdef _WIN32
const char * const surfaceExtension = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
const char * const platformSurfaceExtension = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
#elif defined(__ANDROID__)
const char *surfaceExtension = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
const char *platformSurfaceExtension = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
#else
const char *surfaceExtension = 0;
const char *platformSurfaceExtension = 0;
#endif
if (!localEnumerateInstanceExtensionProperties || !localCreateInstance || !localEnumerate || !localDestroyInstance || !localGetPhysicalDeviceProperties)
@ -295,16 +308,18 @@ bool VulkanMayBeAvailable() {
goto bail;
}
if (surfaceExtension) {
if (platformSurfaceExtension) {
for (auto iter : instanceExts) {
if (!strcmp(iter.extensionName, surfaceExtension)) {
instanceExtensions[0] = surfaceExtension;
ci.enabledExtensionCount = 1;
if (!strcmp(iter.extensionName, platformSurfaceExtension)) {
instanceExtensions[ci.enabledExtensionCount++] = platformSurfaceExtension;
platformSurfaceExtensionFound = true;
break;
} else if (!strcmp(iter.extensionName, VK_KHR_SURFACE_EXTENSION_NAME)) {
surfaceExtensionFound = true;
}
}
if (!ci.enabledExtensionCount) {
ELOG("Surface extension not found");
if (!platformSurfaceExtensionFound || !surfaceExtensionFound) {
ELOG("Platform surface extension not found");
goto bail;
}
}
@ -409,7 +424,7 @@ bool VulkanLoad() {
return true;
}
void VulkanLoadInstanceFunctions(VkInstance instance) {
void VulkanLoadInstanceFunctions(VkInstance instance, const VulkanDeviceExtensions &enabledExtensions) {
// OK, let's use the above functions to get the rest.
LOAD_INSTANCE_FUNC(instance, vkDestroyInstance);
LOAD_INSTANCE_FUNC(instance, vkEnumeratePhysicalDevices);
@ -569,8 +584,25 @@ void VulkanLoadInstanceFunctions(VkInstance instance) {
LOAD_INSTANCE_FUNC(instance, vkDestroySurfaceKHR);
dyn_vkCreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
dyn_vkDestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugReportCallbackEXT");
if (enabledExtensions.KHR_get_physical_device_properties2) {
LOAD_INSTANCE_FUNC(instance, vkGetPhysicalDeviceProperties2KHR);
LOAD_INSTANCE_FUNC(instance, vkGetPhysicalDeviceFeatures2KHR);
}
if (enabledExtensions.EXT_debug_report) {
LOAD_INSTANCE_FUNC(instance, vkCreateDebugReportCallbackEXT);
LOAD_INSTANCE_FUNC(instance, vkDestroyDebugReportCallbackEXT);
}
if (enabledExtensions.EXT_debug_utils) {
LOAD_INSTANCE_FUNC(instance, vkCreateDebugUtilsMessengerEXT);
LOAD_INSTANCE_FUNC(instance, vkDestroyDebugUtilsMessengerEXT);
LOAD_INSTANCE_FUNC(instance, vkCmdBeginDebugUtilsLabelEXT);
LOAD_INSTANCE_FUNC(instance, vkCmdEndDebugUtilsLabelEXT);
LOAD_INSTANCE_FUNC(instance, vkCmdInsertDebugUtilsLabelEXT);
LOAD_INSTANCE_FUNC(instance, vkSetDebugUtilsObjectNameEXT);
LOAD_INSTANCE_FUNC(instance, vkSetDebugUtilsObjectTagEXT);
}
WLOG("Vulkan instance functions loaded.");
}
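
LOAD_INSTANCE_FUNC / LOAD_DEVICE_FUNC are defined elsewhere in this file; a typical definition for this style of loader resolves each entry point by name (sketch only, the exact PPSSPP macros may log failures or differ in detail):

// Sketch of what a LOAD_INSTANCE_FUNC-style macro generally does: look the
// symbol up on the instance and store it in the global PFN_* pointer
// declared in VulkanLoader.h.
#define LOAD_INSTANCE_FUNC(instance, x) \
	x = (PFN_##x)vkGetInstanceProcAddr(instance, #x)

// LOAD_DEVICE_FUNC is the same idea against vkGetDeviceProcAddr, which may
// return device-specific (potentially faster) entry points.
#define LOAD_DEVICE_FUNC(device, x) \
	x = (PFN_##x)vkGetDeviceProcAddr(device, #x)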
@ -578,7 +610,7 @@ void VulkanLoadInstanceFunctions(VkInstance instance) {
// On some implementations, loading functions (that have Device as their first parameter) via vkGetDeviceProcAddr may
// increase performance - but then these function pointers will only work on that specific device. Thus, this loader is not very
// good for multi-device.
void VulkanLoadDeviceFunctions(VkDevice device) {
void VulkanLoadDeviceFunctions(VkDevice device, const VulkanDeviceExtensions &enabledExtensions) {
WLOG("Vulkan device functions loaded.");
// TODO: Move more functions from VulkanLoadInstanceFunctions to here.
LOAD_DEVICE_FUNC(device, vkCreateSwapchainKHR);
@ -586,10 +618,13 @@ void VulkanLoadDeviceFunctions(VkDevice device) {
LOAD_DEVICE_FUNC(device, vkGetSwapchainImagesKHR);
LOAD_DEVICE_FUNC(device, vkAcquireNextImageKHR);
LOAD_DEVICE_FUNC(device, vkQueuePresentKHR);
LOAD_DEVICE_FUNC(device, vkGetMemoryHostPointerPropertiesEXT);
LOAD_DEVICE_FUNC(device, vkGetBufferMemoryRequirements2KHR);
LOAD_DEVICE_FUNC(device, vkGetImageMemoryRequirements2KHR);
if (enabledExtensions.EXT_external_memory_host) {
LOAD_DEVICE_FUNC(device, vkGetMemoryHostPointerPropertiesEXT);
}
if (enabledExtensions.KHR_dedicated_allocation) {
LOAD_DEVICE_FUNC(device, vkGetBufferMemoryRequirements2KHR);
LOAD_DEVICE_FUNC(device, vkGetImageMemoryRequirements2KHR);
}
}
void VulkanFree() {

View File

@ -197,20 +197,48 @@ extern PFN_vkQueuePresentKHR vkQueuePresentKHR;
// And the DEBUG_REPORT extension. Since we load this dynamically even in static
// linked mode, we have to rename it :(
extern PFN_vkCreateDebugReportCallbackEXT dyn_vkCreateDebugReportCallbackEXT;
extern PFN_vkDestroyDebugReportCallbackEXT dyn_vkDestroyDebugReportCallbackEXT;
extern PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT;
extern PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT;
extern PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT;
extern PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT;
extern PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT;
extern PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT;
extern PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT;
extern PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
extern PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
// Assorted other extensions.
extern PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
extern PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
extern PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
extern PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR;
extern PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR;
// For fast extension-enabled checks.
struct VulkanDeviceExtensions {
bool EXT_debug_report;
bool EXT_debug_utils;
bool KHR_maintenance1; // required for KHR_create_renderpass2
bool KHR_maintenance2;
bool KHR_maintenance3;
bool KHR_multiview; // required for KHR_create_renderpass2
bool KHR_get_memory_requirements2;
bool KHR_dedicated_allocation;
bool KHR_create_renderpass2;
bool EXT_external_memory_host;
bool KHR_get_physical_device_properties2;
bool KHR_depth_stencil_resolve;
bool EXT_shader_stencil_export;
// bool EXT_depth_range_unrestricted; // Allows depth outside [0.0, 1.0] in 32-bit float depth buffers.
};
// Way to do a quick check before even attempting to load.
bool VulkanMayBeAvailable();
void VulkanSetAvailable(bool available);
bool VulkanLoad();
void VulkanLoadInstanceFunctions(VkInstance instance);
void VulkanLoadDeviceFunctions(VkDevice device);
void VulkanLoadInstanceFunctions(VkInstance instance, const VulkanDeviceExtensions &enabledExtensions);
void VulkanLoadDeviceFunctions(VkDevice device, const VulkanDeviceExtensions &enabledExtensions);
void VulkanFree();
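
For orientation, the loader API declared here is driven roughly in this order by VulkanContext (call-sequence sketch, error handling omitted):

// Sketch: overall load/teardown order for the dynamic loader.
if (!VulkanLoad())                 // dlopen/LoadLibrary + core entry points
	return false;
// ... vkCreateInstance with the chosen instance extensions ...
VulkanLoadInstanceFunctions(instance, extensionsLookup);  // instance-level + debug/properties2 entry points
// ... pick physical device, vkCreateDevice with device extensions ...
VulkanLoadDeviceFunctions(device, extensionsLookup);      // swapchain + extension device entry points
// ... run ...
VulkanFree();                      // unload the library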

View File

@ -96,9 +96,9 @@ ShaderManagerD3D11::ShaderManagerD3D11(Draw::DrawContext *draw, ID3D11Device *de
memset(&ub_lights, 0, sizeof(ub_lights));
memset(&ub_bones, 0, sizeof(ub_bones));
INFO_LOG(G3D, "sizeof(ub_base): %d", (int)sizeof(ub_base));
INFO_LOG(G3D, "sizeof(ub_lights): %d", (int)sizeof(ub_lights));
INFO_LOG(G3D, "sizeof(ub_bones): %d", (int)sizeof(ub_bones));
static_assert(sizeof(ub_base) <= 512, "ub_base grew too big");
static_assert(sizeof(ub_lights) <= 512, "ub_lights grew too big");
static_assert(sizeof(ub_bones) <= 384, "ub_bones grew too big");
D3D11_BUFFER_DESC desc{sizeof(ub_base), D3D11_USAGE_DYNAMIC, D3D11_BIND_CONSTANT_BUFFER, D3D11_CPU_ACCESS_WRITE };
ASSERT_SUCCESS(device_->CreateBuffer(&desc, nullptr, &push_base));

View File

@ -401,7 +401,6 @@ VkDescriptorSet DrawEngineVulkan::GetOrCreateDescriptorSet(VkImageView imageView
VkDescriptorSet desc;
VkDescriptorSetAllocateInfo descAlloc{ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO };
descAlloc.pNext = nullptr;
descAlloc.pSetLayouts = &descriptorSetLayout_;
descAlloc.descriptorPool = frame.descPool;
descAlloc.descriptorSetCount = 1;
@ -1023,7 +1022,7 @@ void TessellationDataTransferVulkan::SendDataToShader(const SimpleVertex *const
int size = size_u * size_v;
int ssboAlignment = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).limits.minStorageBufferOffsetAlignment;
int ssboAlignment = vulkan_->GetPhysicalDeviceProperties().properties.limits.minStorageBufferOffsetAlignment;
uint8_t *data = (uint8_t *)push_->PushAligned(size * sizeof(TessData), (uint32_t *)&bufInfo_[0].offset, &bufInfo_[0].buffer, ssboAlignment);
bufInfo_[0].range = size * sizeof(TessData);

View File

@ -95,7 +95,7 @@ GPU_Vulkan::GPU_Vulkan(GraphicsContext *gfxCtx, Draw::DrawContext *draw)
UpdateVsyncInterval(true);
textureCacheVulkan_->NotifyConfigChanged();
if (vulkan_->GetFeaturesEnabled().wideLines) {
if (vulkan_->GetDeviceFeatures().enabled.wideLines) {
drawEngine_.SetLineWidth(PSP_CoreParameter().renderWidth / 480.0f);
}
@ -183,7 +183,7 @@ void GPU_Vulkan::CheckGPUFeatures() {
features |= GPU_SUPPORTS_VS_RANGE_CULLING;
switch (vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID) {
switch (vulkan_->GetPhysicalDeviceProperties().properties.vendorID) {
case VULKAN_VENDOR_AMD:
// Accurate depth is required on AMD (due to reverse-Z driver bug) so we ignore the compat flag to disable it on those. See #9545
features |= GPU_SUPPORTS_ACCURATE_DEPTH;
@ -191,7 +191,7 @@ void GPU_Vulkan::CheckGPUFeatures() {
case VULKAN_VENDOR_ARM:
// Also required on older ARM Mali drivers, like the one on many Galaxy S7.
if (!PSP_CoreParameter().compat.flags().DisableAccurateDepth ||
vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).driverVersion <= VK_MAKE_VERSION(428, 811, 2674)) {
vulkan_->GetPhysicalDeviceProperties().properties.driverVersion <= VK_MAKE_VERSION(428, 811, 2674)) {
features |= GPU_SUPPORTS_ACCURATE_DEPTH;
}
break;
@ -217,21 +217,21 @@ void GPU_Vulkan::CheckGPUFeatures() {
features |= GPU_SUPPORTS_VERTEX_TEXTURE_FETCH;
features |= GPU_SUPPORTS_TEXTURE_FLOAT;
if (vulkan_->GetFeaturesEnabled().wideLines) {
if (vulkan_->GetDeviceFeatures().enabled.wideLines) {
features |= GPU_SUPPORTS_WIDE_LINES;
}
if (vulkan_->GetFeaturesEnabled().depthClamp) {
if (vulkan_->GetDeviceFeatures().enabled.depthClamp) {
features |= GPU_SUPPORTS_DEPTH_CLAMP;
}
if (vulkan_->GetFeaturesEnabled().dualSrcBlend) {
if (vulkan_->GetDeviceFeatures().enabled.dualSrcBlend) {
if (!g_Config.bVendorBugChecksEnabled || !draw_->GetBugs().Has(Draw::Bugs::DUAL_SOURCE_BLENDING_BROKEN)) {
features |= GPU_SUPPORTS_DUALSOURCE_BLEND;
}
}
if (vulkan_->GetFeaturesEnabled().logicOp) {
if (vulkan_->GetDeviceFeatures().enabled.logicOp) {
features |= GPU_SUPPORTS_LOGIC_OP;
}
if (vulkan_->GetFeaturesEnabled().samplerAnisotropy) {
if (vulkan_->GetDeviceFeatures().enabled.samplerAnisotropy) {
features |= GPU_SUPPORTS_ANISOTROPY;
}
@ -264,7 +264,7 @@ void GPU_Vulkan::BeginHostFrame() {
framebufferManager_->Resized();
drawEngine_.Resized();
textureCacheVulkan_->NotifyConfigChanged();
if (vulkan_->GetFeaturesEnabled ().wideLines) {
if (vulkan_->GetDeviceFeatures().enabled.wideLines) {
drawEngine_.SetLineWidth(PSP_CoreParameter().renderWidth / 480.0f);
}
}
@ -311,8 +311,8 @@ void GPU_Vulkan::EndHostFrame() {
// Needs to be called on GPU thread, not reporting thread.
void GPU_Vulkan::BuildReportingInfo() {
const auto &props = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice());
const auto &features = vulkan_->GetFeaturesAvailable();
const auto &props = vulkan_->GetPhysicalDeviceProperties().properties;
const auto &features = vulkan_->GetDeviceFeatures().available;
#define CHECK_BOOL_FEATURE(n) do { if (features.n) { featureNames += ", " #n; } } while (false)

View File

@ -112,10 +112,12 @@ static int SetupVertexAttribs(VkVertexInputAttributeDescription attrs[], const D
return count;
}
static int SetupVertexAttribsPretransformed(VkVertexInputAttributeDescription attrs[], bool needsColor1) {
static int SetupVertexAttribsPretransformed(VkVertexInputAttributeDescription attrs[], bool needsUV, bool needsColor1) {
int count = 0;
VertexAttribSetup(&attrs[count++], DEC_FLOAT_4, 0, PspAttributeLocation::POSITION);
VertexAttribSetup(&attrs[count++], DEC_FLOAT_3, 16, PspAttributeLocation::TEXCOORD);
if (needsUV) {
VertexAttribSetup(&attrs[count++], DEC_FLOAT_3, 16, PspAttributeLocation::TEXCOORD);
}
VertexAttribSetup(&attrs[count++], DEC_U8_4, 28, PspAttributeLocation::COLOR0);
if (needsColor1) {
VertexAttribSetup(&attrs[count++], DEC_U8_4, 32, PspAttributeLocation::COLOR1);
@ -235,7 +237,6 @@ static VulkanPipeline *CreateVulkanPipeline(VkDevice device, VkPipelineCache pip
inputAssembly.flags = 0;
inputAssembly.topology = (VkPrimitiveTopology)key.topology;
inputAssembly.primitiveRestartEnable = false;
int vertexStride = 0;
int offset = 0;
@ -245,8 +246,9 @@ static VulkanPipeline *CreateVulkanPipeline(VkDevice device, VkPipelineCache pip
attributeCount = SetupVertexAttribs(attrs, *decFmt);
vertexStride = decFmt->stride;
} else {
bool needsUV = vs->GetID().Bit(VS_BIT_DO_TEXTURE);
bool needsColor1 = vs->GetID().Bit(VS_BIT_LMODE);
attributeCount = SetupVertexAttribsPretransformed(attrs, needsColor1);
attributeCount = SetupVertexAttribsPretransformed(attrs, needsUV, needsColor1);
vertexStride = 36;
}
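
For reference, the offsets used above describe a 36-byte pretransformed vertex, equivalent to the following layout (illustrative struct, not a type from the codebase):

// Layout implied by the attribute offsets above (stride 36):
struct PretransformedVertex {
	float   pos[4];     // offset 0,  DEC_FLOAT_4, POSITION
	float   uv[3];      // offset 16, DEC_FLOAT_3, TEXCOORD (skipped when !needsUV)
	uint8_t color0[4];  // offset 28, DEC_U8_4,    COLOR0
	uint8_t color1[4];  // offset 32, DEC_U8_4,    COLOR1 (only when needsColor1)
};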
@ -679,7 +681,7 @@ bool PipelineManagerVulkan::LoadCache(FILE *file, bool loadRawPipelineCache, Sha
WARN_LOG(G3D, "Bad Vulkan pipeline cache header - ignoring");
return false;
}
if (0 != memcmp(header->uuid, vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).pipelineCacheUUID, VK_UUID_SIZE)) {
if (0 != memcmp(header->uuid, vulkan_->GetPhysicalDeviceProperties().properties.pipelineCacheUUID, VK_UUID_SIZE)) {
// Wrong hardware/driver/etc.
WARN_LOG(G3D, "Bad Vulkan pipeline cache UUID - ignoring");
return false;

View File

@ -160,14 +160,14 @@ std::string VulkanVertexShader::GetShaderString(DebugShaderStringType type) cons
ShaderManagerVulkan::ShaderManagerVulkan(Draw::DrawContext *draw, VulkanContext *vulkan)
: ShaderManagerCommon(draw), vulkan_(vulkan), lastVShader_(nullptr), lastFShader_(nullptr), fsCache_(16), vsCache_(16) {
codeBuffer_ = new char[16384];
uboAlignment_ = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).limits.minUniformBufferOffsetAlignment;
uboAlignment_ = vulkan_->GetPhysicalDeviceProperties().properties.limits.minUniformBufferOffsetAlignment;
memset(&ub_base, 0, sizeof(ub_base));
memset(&ub_lights, 0, sizeof(ub_lights));
memset(&ub_bones, 0, sizeof(ub_bones));
ILOG("sizeof(ub_base): %d", (int)sizeof(ub_base));
ILOG("sizeof(ub_lights): %d", (int)sizeof(ub_lights));
ILOG("sizeof(ub_bones): %d", (int)sizeof(ub_bones));
static_assert(sizeof(ub_base) <= 512, "ub_base grew too big");
static_assert(sizeof(ub_lights) <= 512, "ub_lights grew too big");
static_assert(sizeof(ub_bones) <= 384, "ub_bones grew too big");
}
ShaderManagerVulkan::~ShaderManagerVulkan() {
@ -178,7 +178,7 @@ ShaderManagerVulkan::~ShaderManagerVulkan() {
void ShaderManagerVulkan::DeviceRestore(VulkanContext *vulkan, Draw::DrawContext *draw) {
vulkan_ = vulkan;
draw_ = draw;
uboAlignment_ = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).limits.minUniformBufferOffsetAlignment;
uboAlignment_ = vulkan_->GetPhysicalDeviceProperties().properties.limits.minUniformBufferOffsetAlignment;
}
void ShaderManagerVulkan::Clear() {
@ -270,7 +270,7 @@ void ShaderManagerVulkan::GetShaders(int prim, u32 vertType, VulkanVertexShader
VulkanFragmentShader *fs = fsCache_.Get(FSID);
if (!fs) {
uint32_t vendorID = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID;
uint32_t vendorID = vulkan_->GetPhysicalDeviceProperties().properties.vendorID;
// Fragment shader not in cache. Let's compile it.
GenerateVulkanGLSLFragmentShader(FSID, codeBuffer_, vendorID);
fs = new VulkanFragmentShader(vulkan_, FSID, codeBuffer_);
@ -392,7 +392,7 @@ bool ShaderManagerVulkan::LoadCache(FILE *f) {
VulkanVertexShader *vs = new VulkanVertexShader(vulkan_, id, codeBuffer_, useHWTransform);
vsCache_.Insert(id, vs);
}
uint32_t vendorID = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID;
uint32_t vendorID = vulkan_->GetPhysicalDeviceProperties().properties.vendorID;
for (int i = 0; i < header.numFragmentShaders; i++) {
FShaderID id;
if (fread(&id, sizeof(id), 1, f) != 1) {

View File

@ -237,7 +237,7 @@ void DrawEngineVulkan::ConvertStateToVulkanKey(FramebufferManagerVulkan &fbManag
key.colorWriteMask = (rmask ? VK_COLOR_COMPONENT_R_BIT : 0) | (gmask ? VK_COLOR_COMPONENT_G_BIT : 0) | (bmask ? VK_COLOR_COMPONENT_B_BIT : 0) | (amask ? VK_COLOR_COMPONENT_A_BIT : 0);
// Workaround proposed in #10421, for bug where the color write mask is not applied correctly on Adreno.
if ((gstate.pmskc & 0x00FFFFFF) == 0x00FFFFFF && vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID == VULKAN_VENDOR_QUALCOMM) {
if ((gstate.pmskc & 0x00FFFFFF) == 0x00FFFFFF && vulkan_->GetPhysicalDeviceProperties().properties.vendorID == VULKAN_VENDOR_QUALCOMM) {
key.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
if (!key.blendEnable) {
key.blendEnable = true;

View File

@ -146,7 +146,7 @@ bool FramebufferManagerVulkan::NotifyStencilUpload(u32 addr, int size, bool skip
if (!stencilVs_) {
const char *stencil_fs_source = stencil_fs;
// See comment above the stencil_fs_adreno definition.
if (vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID == VULKAN_VENDOR_QUALCOMM)
if (vulkan_->GetPhysicalDeviceProperties().properties.vendorID == VULKAN_VENDOR_QUALCOMM)
stencil_fs_source = stencil_fs_adreno;
stencilVs_ = CompileShaderModule(vulkan_, VK_SHADER_STAGE_VERTEX_BIT, stencil_vs, &error);

View File

@ -683,7 +683,7 @@ void TextureCacheVulkan::BuildTexture(TexCacheEntry *const entry) {
uint32_t bufferOffset;
VkBuffer texBuf;
// nvidia returns 1 but that can't be healthy... let's align by 16 as a minimum.
int pushAlignment = std::max(16, (int)vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).limits.optimalBufferCopyOffsetAlignment);
int pushAlignment = std::max(16, (int)vulkan_->GetPhysicalDeviceProperties().properties.limits.optimalBufferCopyOffsetAlignment);
void *data = drawEngine_->GetPushBufferForTextureData()->PushAligned(size, &bufferOffset, &texBuf, pushAlignment);
if (replaced.Valid()) {
replaced.Load(i, data, stride);

View File

@ -160,12 +160,11 @@ VkDescriptorSet Vulkan2D::GetDescriptorSet(VkImageView tex1, VkSampler sampler1,
assert(result == VK_SUCCESS);
// We just don't write to the slots we don't care about.
VkWriteDescriptorSet writes[2];
memset(writes, 0, sizeof(writes));
VkWriteDescriptorSet writes[2]{};
// Main and sub textures
int n = 0;
VkDescriptorImageInfo image1 = {};
VkDescriptorImageInfo image2 = {};
VkDescriptorImageInfo image1{};
VkDescriptorImageInfo image2{};
if (tex1) {
#ifdef VULKAN_USE_GENERAL_LAYOUT_FOR_COLOR
image1.imageLayout = VK_IMAGE_LAYOUT_GENERAL;

View File

@ -390,6 +390,7 @@
<ClCompile Include="..\..\ext\glslang\SPIRV\Logger.cpp" />
<ClCompile Include="..\..\ext\glslang\SPIRV\SpvBuilder.cpp" />
<ClCompile Include="..\..\ext\glslang\SPIRV\SPVRemapper.cpp" />
<ClCompile Include="..\..\ext\glslang\SPIRV\SpvPostProcess.cpp" />
<ClCompile Include="pch.cpp">
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Create</PrecompiledHeader>
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Create</PrecompiledHeader>
@ -405,4 +406,4 @@
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>
</Project>

View File

@ -114,7 +114,7 @@ bool WindowsVulkanContext::Init(HINSTANCE hInst, HWND hWnd, std::string *error_m
if (deviceNum < 0) {
deviceNum = g_Vulkan->GetBestPhysicalDevice();
if (!g_Config.sVulkanDevice.empty())
g_Config.sVulkanDevice = g_Vulkan->GetPhysicalDeviceProperties(deviceNum).deviceName;
g_Config.sVulkanDevice = g_Vulkan->GetPhysicalDeviceProperties(deviceNum).properties.deviceName;
}
g_Vulkan->ChooseDevice(deviceNum);
if (g_Vulkan->CreateDevice() != VK_SUCCESS) {
@ -124,8 +124,17 @@ bool WindowsVulkanContext::Init(HINSTANCE hInst, HWND hWnd, std::string *error_m
return false;
}
if (g_validate_) {
int bits = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
g_Vulkan->InitDebugMsgCallback(&Vulkan_Dbg, bits, &g_LogOptions);
if (g_Vulkan->DeviceExtensions().EXT_debug_utils) {
int bits = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
| VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT
| VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT
| VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT
| VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
g_Vulkan->InitDebugUtilsCallback(&VulkanDebugUtilsCallback, bits, &g_LogOptions);
} else {
int bits = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
g_Vulkan->InitDebugMsgCallback(&VulkanDebugReportCallback, bits, &g_LogOptions);
}
}
g_Vulkan->InitSurface(WINDOWSYSTEM_WIN32, (void *)hInst, (void *)hWnd);
if (!g_Vulkan->InitObjects()) {
@ -137,7 +146,7 @@ bool WindowsVulkanContext::Init(HINSTANCE hInst, HWND hWnd, std::string *error_m
bool splitSubmit = g_Config.bGfxDebugSplitSubmit;
draw_ = Draw::T3DCreateVulkanContext(g_Vulkan, splitSubmit);
SetGPUBackend(GPUBackend::VULKAN, g_Vulkan->GetPhysicalDeviceProperties(deviceNum).deviceName);
SetGPUBackend(GPUBackend::VULKAN, g_Vulkan->GetPhysicalDeviceProperties(deviceNum).properties.deviceName);
bool success = draw_->CreatePresets();
_assert_msg_(G3D, success, "Failed to compile preset shaders");
draw_->HandleEvent(Draw::Event::GOT_BACKBUFFER, g_Vulkan->GetBackbufferWidth(), g_Vulkan->GetBackbufferHeight());
@ -160,6 +169,7 @@ void WindowsVulkanContext::Shutdown() {
g_Vulkan->WaitUntilQueueIdle();
g_Vulkan->DestroyObjects();
g_Vulkan->DestroyDevice();
g_Vulkan->DestroyDebugUtilsCallback();
g_Vulkan->DestroyDebugMsgCallback();
g_Vulkan->DestroyInstance();

View File

@ -160,6 +160,7 @@ bool AndroidVulkanContext::InitFromRenderThread(ANativeWindow *wnd, int desiredB
if (!success) {
g_Vulkan->DestroyObjects();
g_Vulkan->DestroyDevice();
g_Vulkan->DestroyDebugUtilsCallback();
g_Vulkan->DestroyDebugMsgCallback();
g_Vulkan->DestroyInstance();
@ -181,6 +182,7 @@ void AndroidVulkanContext::ShutdownFromRenderThread() {
void AndroidVulkanContext::Shutdown() {
ILOG("Calling NativeShutdownGraphics");
g_Vulkan->DestroyDevice();
g_Vulkan->DestroyDebugUtilsCallback();
g_Vulkan->DestroyDebugMsgCallback();
g_Vulkan->DestroyInstance();

@ -1 +1 @@
Subproject commit 29619b2312f7bc862221749f3f4d37c3e6a0dee2
Subproject commit 9983f99e87ab0b6608b236ea59bcf873f90e1435

View File

@ -50,6 +50,7 @@ LOCAL_SRC_FILES := \
../glslang/SPIRV/InReadableOrder.cpp \
../glslang/SPIRV/SpvBuilder.cpp \
../glslang/SPIRV/SPVRemapper.cpp \
../glslang/SPIRV/SpvPostProcess.cpp \
../glslang/OGLCompilersDLL/InitializeDll.cpp

View File

@ -193,6 +193,7 @@
<ClCompile Include="glslang\SPIRV\InReadableOrder.cpp" />
<ClCompile Include="glslang\SPIRV\Logger.cpp" />
<ClCompile Include="glslang\SPIRV\SpvBuilder.cpp" />
<ClCompile Include="glslang\SPIRV\SpvPostProcess.cpp" />
<ClCompile Include="glslang\SPIRV\SPVRemapper.cpp" />
</ItemGroup>
<ItemGroup>

View File

@ -140,6 +140,9 @@
<ClCompile Include="glslang\glslang\MachineIndependent\attribute.cpp">
<Filter>glslang</Filter>
</ClCompile>
<ClCompile Include="glslang\SPIRV\SpvPostProcess.cpp">
<Filter>SPIRV</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="glslang\SPIRV\disassemble.h">

View File

@ -865,7 +865,7 @@ void VulkanQueueRunner::PerformBindFramebufferAsRenderTarget(const VKRStep &step
// See pull request #10723.
bool maliBugWorkaround = step.render.numDraws == 0 &&
step.render.color == VKRRenderPassAction::CLEAR &&
vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).driverVersion == 0xaa9c4b29;
vulkan_->GetPhysicalDeviceProperties().properties.driverVersion == 0xaa9c4b29;
if (maliBugWorkaround) {
TransitionImageLayout2(cmd, step.render.framebuffer->color.image, 0, 1, VK_IMAGE_ASPECT_COLOR_BIT,
fb->color.layout, VK_IMAGE_LAYOUT_GENERAL,

View File

@ -42,24 +42,39 @@ void CreateImage(VulkanContext *vulkan, VkCommandBuffer cmd, VKRImage &img, int
vkCreateImage(vulkan->GetDevice(), &ici, nullptr, &img.image);
VkMemoryRequirements memreq;
vkGetImageMemoryRequirements(vulkan->GetDevice(), img.image, &memreq);
bool dedicatedAllocation = false;
if (vulkan->DeviceExtensions().KHR_dedicated_allocation) {
VkImageMemoryRequirementsInfo2KHR memReqInfo2{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
memReqInfo2.image = img.image;
VkMemoryRequirements2KHR memReq2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
VkMemoryDedicatedRequirementsKHR memDedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
memReq2.pNext = &memDedicatedReq;
vkGetImageMemoryRequirements2KHR(vulkan->GetDevice(), &memReqInfo2, &memReq2);
memreq = memReq2.memoryRequirements;
dedicatedAllocation =
(memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
(memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
} else {
vkGetImageMemoryRequirements(vulkan->GetDevice(), img.image, &memreq);
}
VkMemoryAllocateInfo alloc{ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
alloc.allocationSize = memreq.size;
/*
// Hint to the driver that this allocation is image-specific. Some drivers benefit.
// We only bother supporting the KHR extension, not the old NV one.
VkMemoryDedicatedAllocateInfoKHR dedicated{ VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
if (vulkan->DeviceExtensions().DEDICATED_ALLOCATION) {
alloc.pNext = &dedicated;
dedicated.image = img.image;
VkMemoryDedicatedAllocateInfoKHR dedicatedAllocateInfo{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR};
if (dedicatedAllocation) {
dedicatedAllocateInfo.image = img.image;
alloc.pNext = &dedicatedAllocateInfo;
}
*/
vulkan->MemoryTypeFromProperties(memreq.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, &alloc.memoryTypeIndex);
VkResult res = vkAllocateMemory(vulkan->GetDevice(), &alloc, nullptr, &img.memory);
assert(res == VK_SUCCESS);
res = vkBindImageMemory(vulkan->GetDevice(), img.image, img.memory, 0);
assert(res == VK_SUCCESS);
@ -138,7 +153,7 @@ VulkanRenderManager::VulkanRenderManager(VulkanContext *vulkan) : vulkan_(vulkan
queueRunner_.CreateDeviceObjects();
// Temporary AMD hack for issue #10097
if (vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID == VULKAN_VENDOR_AMD) {
if (vulkan_->GetPhysicalDeviceProperties().properties.vendorID == VULKAN_VENDOR_AMD) {
useThread_ = false;
}
}
@ -554,10 +569,6 @@ bool VulkanRenderManager::InitDepthStencilBuffer(VkCommandBuffer cmd) {
image_info.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
image_info.flags = 0;
VkMemoryAllocateInfo mem_alloc = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
mem_alloc.allocationSize = 0;
mem_alloc.memoryTypeIndex = 0;
VkMemoryRequirements mem_reqs;
depth_.format = depth_format;
@ -568,9 +579,36 @@ bool VulkanRenderManager::InitDepthStencilBuffer(VkCommandBuffer cmd) {
if (res != VK_SUCCESS)
return false;
vkGetImageMemoryRequirements(device, depth_.image, &mem_reqs);
bool dedicatedAllocation = false;
if (vulkan_->DeviceExtensions().KHR_dedicated_allocation) {
VkImageMemoryRequirementsInfo2KHR memReqInfo2{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
memReqInfo2.image = depth_.image;
VkMemoryRequirements2KHR memReq2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
VkMemoryDedicatedRequirementsKHR memDedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
memReq2.pNext = &memDedicatedReq;
vkGetImageMemoryRequirements2KHR(vulkan_->GetDevice(), &memReqInfo2, &memReq2);
mem_reqs = memReq2.memoryRequirements;
dedicatedAllocation =
(memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
(memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
} else {
vkGetImageMemoryRequirements(vulkan_->GetDevice(), depth_.image, &mem_reqs);
}
VkMemoryAllocateInfo mem_alloc = {VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
mem_alloc.allocationSize = mem_reqs.size;
mem_alloc.memoryTypeIndex = 0;
VkMemoryDedicatedAllocateInfoKHR dedicatedAllocateInfo{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR};
if (dedicatedAllocation) {
dedicatedAllocateInfo.image = depth_.image;
mem_alloc.pNext = &dedicatedAllocateInfo;
}
// Use the memory properties to determine the type of memory required
pass = vulkan_->MemoryTypeFromProperties(mem_reqs.memoryTypeBits,
0, /* No requirements */

View File

@ -655,13 +655,6 @@ public:
virtual void HandleEvent(Event ev, int width, int height, void *param1 = nullptr, void *param2 = nullptr) = 0;
// This flushes command buffers and waits for execution at the point of the end of the last
// renderpass that wrote to the requested framebuffer. This is needed before trying to read it back
// on modern APIs like Vulkan. If the framebuffer is currently being rendered to, we'll just end the render pass.
// The next draw call will automatically start up a new one.
// APIs like OpenGL won't need to implement this one.
virtual void WaitRenderCompletion(Framebuffer *fbo) {}
// Flush state like scissors etc so the caller can do its own custom drawing.
virtual void FlushState() {}

View File

@ -260,7 +260,7 @@ public:
// Returns the binding offset, and the VkBuffer to bind.
size_t PushUBO(VulkanPushBuffer *buf, VulkanContext *vulkan, VkBuffer *vkbuf) {
return buf->PushAligned(ubo_, uboSize_, vulkan->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).limits.minUniformBufferOffsetAlignment, vkbuf);
return buf->PushAligned(ubo_, uboSize_, vulkan->GetPhysicalDeviceProperties().properties.limits.minUniformBufferOffsetAlignment, vkbuf);
}
int GetUniformLoc(const char *name);
@ -352,7 +352,7 @@ public:
std::vector<std::string> GetDeviceList() const override {
std::vector<std::string> list;
for (int i = 0; i < vulkan_->GetNumPhysicalDevices(); i++) {
list.push_back(vulkan_->GetPhysicalDeviceProperties(i).deviceName);
list.push_back(vulkan_->GetPhysicalDeviceProperties(i).properties.deviceName);
}
return list;
}
@ -427,13 +427,11 @@ public:
void EndFrame() override;
void WipeQueue() override;
void FlushState() override {
}
void WaitRenderCompletion(Framebuffer *fbo) override;
void FlushState() override {}
// From Sascha's code
static std::string FormatDriverVersion(const VkPhysicalDeviceProperties &props) {
if (props.vendorID == 4318) {
if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
// 10 bits = major version (up to r1023)
// 8 bits = minor version (up to 255)
// 8 bits = secondary branch version/build version (up to 255)
@ -443,6 +441,9 @@ public:
uint32_t secondaryBranch = (props.driverVersion >> 6) & 0x0ff;
uint32_t tertiaryBranch = (props.driverVersion) & 0x003f;
return StringFromFormat("%d.%d.%d.%d (%08x)", major, minor, secondaryBranch, tertiaryBranch, props.driverVersion);
} else if (props.vendorID == VULKAN_VENDOR_ARM) {
// ARM just puts a hash here, let's just output it as is.
return StringFromFormat("%08x", props.driverVersion);
} else {
// Standard scheme, use the standard macros.
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
@ -456,13 +457,13 @@ public:
// TODO: Make these actually query the right information
switch (info) {
case APINAME: return "Vulkan";
case VENDORSTRING: return vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).deviceName;
case VENDOR: return VulkanVendorString(vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).vendorID);
case DRIVER: return FormatDriverVersion(vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()));
case VENDORSTRING: return vulkan_->GetPhysicalDeviceProperties().properties.deviceName;
case VENDOR: return VulkanVendorString(vulkan_->GetPhysicalDeviceProperties().properties.vendorID);
case DRIVER: return FormatDriverVersion(vulkan_->GetPhysicalDeviceProperties().properties);
case SHADELANGVERSION: return "N/A";;
case APIVERSION:
{
uint32_t ver = vulkan_->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).apiVersion;
uint32_t ver = vulkan_->GetPhysicalDeviceProperties().properties.apiVersion;
return StringFromFormat("%d.%d.%d", ver >> 22, (ver >> 12) & 0x3ff, ver & 0xfff);
}
default: return "?";
@ -750,19 +751,19 @@ bool VKTexture::Create(VkCommandBuffer cmd, VulkanPushBuffer *push, const Textur
VKContext::VKContext(VulkanContext *vulkan, bool splitSubmit)
: vulkan_(vulkan), caps_{}, renderManager_(vulkan) {
caps_.anisoSupported = vulkan->GetFeaturesAvailable().samplerAnisotropy != 0;
caps_.geometryShaderSupported = vulkan->GetFeaturesAvailable().geometryShader != 0;
caps_.tesselationShaderSupported = vulkan->GetFeaturesAvailable().tessellationShader != 0;
caps_.multiViewport = vulkan->GetFeaturesAvailable().multiViewport != 0;
caps_.dualSourceBlend = vulkan->GetFeaturesAvailable().dualSrcBlend != 0;
caps_.depthClampSupported = vulkan->GetFeaturesAvailable().depthClamp != 0;
caps_.anisoSupported = vulkan->GetDeviceFeatures().enabled.samplerAnisotropy != 0;
caps_.geometryShaderSupported = vulkan->GetDeviceFeatures().enabled.geometryShader != 0;
caps_.tesselationShaderSupported = vulkan->GetDeviceFeatures().enabled.tessellationShader != 0;
caps_.multiViewport = vulkan->GetDeviceFeatures().enabled.multiViewport != 0;
caps_.dualSourceBlend = vulkan->GetDeviceFeatures().enabled.dualSrcBlend != 0;
caps_.depthClampSupported = vulkan->GetDeviceFeatures().enabled.depthClamp != 0;
caps_.framebufferBlitSupported = true;
caps_.framebufferCopySupported = true;
caps_.framebufferDepthBlitSupported = false; // Can be checked for.
caps_.framebufferDepthCopySupported = true; // Will pretty much always be the case.
caps_.preferredDepthBufferFormat = DataFormat::D24_S8; // TODO: Ask vulkan.
auto deviceProps = vulkan->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice());
auto deviceProps = vulkan->GetPhysicalDeviceProperties(vulkan_->GetCurrentPhysicalDevice()).properties;
switch (deviceProps.vendorID) {
case VULKAN_VENDOR_AMD: caps_.vendor = GPUVendor::VENDOR_AMD; break;
case VULKAN_VENDOR_ARM: caps_.vendor = GPUVendor::VENDOR_ARM; break;
@ -891,10 +892,6 @@ void VKContext::BeginFrame() {
_assert_(result == VK_SUCCESS);
}
void VKContext::WaitRenderCompletion(Framebuffer *fbo) {
// TODO
}
void VKContext::EndFrame() {
// Stop collecting data in the frame's data pushbuffer.
push_->End();
@ -1011,13 +1008,11 @@ Pipeline *VKContext::CreateGraphicsPipeline(const PipelineDesc &desc) {
dynamicInfo.dynamicStateCount = depth->info.stencilTestEnable ? ARRAY_SIZE(dynamics) : 2;
dynamicInfo.pDynamicStates = dynamics;
VkPipelineMultisampleStateCreateInfo ms = { VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
ms.pNext = nullptr;
VkPipelineMultisampleStateCreateInfo ms{ VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
ms.pSampleMask = nullptr;
ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
VkPipelineViewportStateCreateInfo vs = { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
vs.pNext = nullptr;
VkPipelineViewportStateCreateInfo vs{ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
vs.viewportCount = 1;
vs.scissorCount = 1;
vs.pViewports = nullptr; // dynamic
@ -1026,8 +1021,7 @@ Pipeline *VKContext::CreateGraphicsPipeline(const PipelineDesc &desc) {
VkPipelineRasterizationStateCreateInfo rs{ VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO };
raster->ToVulkan(&rs);
VkGraphicsPipelineCreateInfo info = { VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
info.pNext = nullptr;
VkGraphicsPipelineCreateInfo info{ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
info.flags = 0;
info.stageCount = (uint32_t)stages.size();
info.pStages = stages.data();
@ -1319,8 +1313,8 @@ static const char *VulkanFormatToString(VkFormat fmt) {
}
std::vector<std::string> VKContext::GetFeatureList() const {
const VkPhysicalDeviceFeatures &available = vulkan_->GetFeaturesAvailable();
const VkPhysicalDeviceFeatures &enabled = vulkan_->GetFeaturesEnabled();
const VkPhysicalDeviceFeatures &available = vulkan_->GetDeviceFeatures().available;
const VkPhysicalDeviceFeatures &enabled = vulkan_->GetDeviceFeatures().enabled;
std::vector<std::string> features;
AddFeature(features, "dualSrcBlend", available.dualSrcBlend, enabled.dualSrcBlend);